author     chojoyce <chojoyce@google.com>  2022-01-05 04:25:23 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2022-01-05 04:25:23 +0000
commit     95950852f5aedfcd16f1637234ddc54bc475d714 (patch)
tree       412a59ade78bcacc2449642d7cfd7ccb6a92d8e4
parent     d4dcb747f8ac4df33c321b19ee1c511f6e47707e (diff)
parent     4e81cd9cc2fd5700fad778754c7db0b34f9fe04f (diff)
download   python-api-core-95950852f5aedfcd16f1637234ddc54bc475d714.tar.gz

Merge platform/external/python/python-api-core v2.3.0 am: 4e81cd9cc2

Original change: https://android-review.googlesource.com/c/platform/external/python/python-api-core/+/1931601
Change-Id: I9e4c4f81d39325aed171d41ca6dcc9d4651049cc
Diffstat:
-rw-r--r--  .coveragerc  13
-rw-r--r--  .flake8  11
-rw-r--r--  .github/.OwlBot.lock.yaml  3
-rw-r--r--  .github/.OwlBot.yaml  19
-rw-r--r--  .github/CODEOWNERS  12
-rw-r--r--  .github/CONTRIBUTING.md  28
-rw-r--r--  .github/ISSUE_TEMPLATE/bug_report.md  43
-rw-r--r--  .github/ISSUE_TEMPLATE/feature_request.md  18
-rw-r--r--  .github/ISSUE_TEMPLATE/support_request.md  7
-rw-r--r--  .github/PULL_REQUEST_TEMPLATE.md  7
-rw-r--r--  .github/header-checker-lint.yml  15
-rw-r--r--  .github/release-please.yml  1
-rw-r--r--  .github/snippet-bot.yml  0
-rw-r--r--  .github/sync-repo-settings.yaml  7
-rw-r--r--  .gitignore  64
-rwxr-xr-x  .kokoro/build.sh  59
-rw-r--r--  .kokoro/continuous/common.cfg  27
-rw-r--r--  .kokoro/continuous/continuous.cfg  1
-rw-r--r--  .kokoro/docker/docs/Dockerfile  67
-rw-r--r--  .kokoro/docs/common.cfg  67
-rw-r--r--  .kokoro/docs/docs-presubmit.cfg  28
-rw-r--r--  .kokoro/docs/docs.cfg  1
-rwxr-xr-x  .kokoro/populate-secrets.sh  43
-rw-r--r--  .kokoro/presubmit/common.cfg  27
-rw-r--r--  .kokoro/presubmit/presubmit.cfg  1
-rwxr-xr-x  .kokoro/publish-docs.sh  64
-rwxr-xr-x  .kokoro/release.sh  32
-rw-r--r--  .kokoro/release/common.cfg  30
-rw-r--r--  .kokoro/release/release.cfg  1
-rw-r--r--  .kokoro/samples/lint/common.cfg  34
-rw-r--r--  .kokoro/samples/lint/continuous.cfg  6
-rw-r--r--  .kokoro/samples/lint/periodic.cfg  6
-rw-r--r--  .kokoro/samples/lint/presubmit.cfg  6
-rw-r--r--  .kokoro/samples/python3.10/common.cfg  40
-rw-r--r--  .kokoro/samples/python3.10/continuous.cfg  6
-rw-r--r--  .kokoro/samples/python3.10/periodic-head.cfg  11
-rw-r--r--  .kokoro/samples/python3.10/periodic.cfg  6
-rw-r--r--  .kokoro/samples/python3.10/presubmit.cfg  6
-rw-r--r--  .kokoro/samples/python3.6/common.cfg  40
-rw-r--r--  .kokoro/samples/python3.6/continuous.cfg  7
-rw-r--r--  .kokoro/samples/python3.6/periodic-head.cfg  11
-rw-r--r--  .kokoro/samples/python3.6/periodic.cfg  6
-rw-r--r--  .kokoro/samples/python3.6/presubmit.cfg  6
-rw-r--r--  .kokoro/samples/python3.7/common.cfg  40
-rw-r--r--  .kokoro/samples/python3.7/continuous.cfg  6
-rw-r--r--  .kokoro/samples/python3.7/periodic-head.cfg  11
-rw-r--r--  .kokoro/samples/python3.7/periodic.cfg  6
-rw-r--r--  .kokoro/samples/python3.7/presubmit.cfg  6
-rw-r--r--  .kokoro/samples/python3.8/common.cfg  40
-rw-r--r--  .kokoro/samples/python3.8/continuous.cfg  6
-rw-r--r--  .kokoro/samples/python3.8/periodic-head.cfg  11
-rw-r--r--  .kokoro/samples/python3.8/periodic.cfg  6
-rw-r--r--  .kokoro/samples/python3.8/presubmit.cfg  6
-rw-r--r--  .kokoro/samples/python3.9/common.cfg  40
-rw-r--r--  .kokoro/samples/python3.9/continuous.cfg  6
-rw-r--r--  .kokoro/samples/python3.9/periodic-head.cfg  11
-rw-r--r--  .kokoro/samples/python3.9/periodic.cfg  6
-rw-r--r--  .kokoro/samples/python3.9/presubmit.cfg  6
-rwxr-xr-x  .kokoro/test-samples-against-head.sh  26
-rwxr-xr-x  .kokoro/test-samples-impl.sh  102
-rwxr-xr-x  .kokoro/test-samples.sh  44
-rwxr-xr-x  .kokoro/trampoline.sh  28
-rwxr-xr-x  .kokoro/trampoline_v2.sh  487
-rw-r--r--  .pre-commit-config.yaml  31
-rw-r--r--  .repo-metadata.json  12
-rw-r--r--  .trampolinerc  63
-rw-r--r--  Android.bp  33
-rw-r--r--  CHANGELOG.md  726
-rw-r--r--  CODE_OF_CONDUCT.md  95
-rw-r--r--  CONTRIBUTING.rst  257
-rw-r--r--  LICENSE  202
-rw-r--r--  MANIFEST.in  25
-rw-r--r--  METADATA  18
-rw-r--r--  MODULE_LICENSE_APACHE2  0
l---------  NOTICE  1
-rw-r--r--  README.rst  28
-rw-r--r--  SECURITY.md  7
-rw-r--r--  docs/_static/custom.css  20
-rw-r--r--  docs/_templates/layout.html  50
-rw-r--r--  docs/auth.rst  213
l---------  docs/changelog.md  1
-rw-r--r--  docs/client_info.rst  11
-rw-r--r--  docs/client_options.rst  6
-rw-r--r--  docs/conf.py  375
-rw-r--r--  docs/exceptions.rst  6
-rw-r--r--  docs/futures.rst  14
-rw-r--r--  docs/helpers.rst  26
-rw-r--r--  docs/iam.rst  7
-rw-r--r--  docs/index.rst  32
-rw-r--r--  docs/multiprocessing.rst  7
-rw-r--r--  docs/operation.rst  13
-rw-r--r--  docs/operations_client.rst  6
-rw-r--r--  docs/page_iterator.rst  13
-rw-r--r--  docs/path_template.rst  6
-rw-r--r--  docs/retry.rst  13
-rw-r--r--  docs/timeout.rst  6
-rw-r--r--  google/__init__.py  25
-rw-r--r--  google/api_core/__init__.py  22
-rw-r--r--  google/api_core/bidi.py  735
-rw-r--r--  google/api_core/client_info.py  107
-rw-r--r--  google/api_core/client_options.py  116
-rw-r--r--  google/api_core/datetime_helpers.py  298
-rw-r--r--  google/api_core/exceptions.py  546
-rw-r--r--  google/api_core/future/__init__.py  19
-rw-r--r--  google/api_core/future/_helpers.py  39
-rw-r--r--  google/api_core/future/async_future.py  162
-rw-r--r--  google/api_core/future/base.py  64
-rw-r--r--  google/api_core/future/polling.py  193
-rw-r--r--  google/api_core/gapic_v1/__init__.py  29
-rw-r--r--  google/api_core/gapic_v1/client_info.py  55
-rw-r--r--  google/api_core/gapic_v1/config.py  166
-rw-r--r--  google/api_core/gapic_v1/config_async.py  42
-rw-r--r--  google/api_core/gapic_v1/method.py  253
-rw-r--r--  google/api_core/gapic_v1/method_async.py  48
-rw-r--r--  google/api_core/gapic_v1/routing_header.py  57
-rw-r--r--  google/api_core/general_helpers.py  16
-rw-r--r--  google/api_core/grpc_helpers.py  495
-rw-r--r--  google/api_core/grpc_helpers_async.py  297
-rw-r--r--  google/api_core/iam.py  427
-rw-r--r--  google/api_core/operation.py  351
-rw-r--r--  google/api_core/operation_async.py  221
-rw-r--r--  google/api_core/operations_v1/__init__.py  27
-rw-r--r--  google/api_core/operations_v1/abstract_operations_client.py  564
-rw-r--r--  google/api_core/operations_v1/operations_async_client.py  322
-rw-r--r--  google/api_core/operations_v1/operations_client.py  332
-rw-r--r--  google/api_core/operations_v1/operations_client_config.py  59
-rw-r--r--  google/api_core/operations_v1/pagers.py  86
-rw-r--r--  google/api_core/operations_v1/transports/__init__.py  30
-rw-r--r--  google/api_core/operations_v1/transports/base.py  232
-rw-r--r--  google/api_core/operations_v1/transports/rest.py  455
-rw-r--r--  google/api_core/page_iterator.py  571
-rw-r--r--  google/api_core/page_iterator_async.py  285
-rw-r--r--  google/api_core/path_template.py  300
-rw-r--r--  google/api_core/protobuf_helpers.py  373
-rw-r--r--  google/api_core/py.typed  2
-rw-r--r--  google/api_core/rest_helpers.py  94
-rw-r--r--  google/api_core/retry.py  366
-rw-r--r--  google/api_core/retry_async.py  291
-rw-r--r--  google/api_core/timeout.py  220
-rw-r--r--  google/api_core/version.py  15
-rw-r--r--  mypy.ini  4
-rw-r--r--  noxfile.py  245
-rw-r--r--  owlbot.py  47
-rw-r--r--  renovate.json  12
-rwxr-xr-x  scripts/decrypt-secrets.sh  46
-rw-r--r--  scripts/readme-gen/readme_gen.py  66
-rw-r--r--  scripts/readme-gen/templates/README.tmpl.rst  87
-rw-r--r--  scripts/readme-gen/templates/auth.tmpl.rst  9
-rw-r--r--  scripts/readme-gen/templates/auth_api_key.tmpl.rst  14
-rw-r--r--  scripts/readme-gen/templates/install_deps.tmpl.rst  29
-rw-r--r--  scripts/readme-gen/templates/install_portaudio.tmpl.rst  35
-rw-r--r--  setup.cfg  12
-rw-r--r--  setup.py  102
-rw-r--r--  testing/.gitignore  3
-rw-r--r--  testing/constraints-3.10.txt  0
-rw-r--r--  testing/constraints-3.11.txt  0
-rw-r--r--  testing/constraints-3.6.txt  17
-rw-r--r--  testing/constraints-3.7.txt  0
-rw-r--r--  testing/constraints-3.8.txt  0
-rw-r--r--  testing/constraints-3.9.txt  0
-rw-r--r--  tests/__init__.py  0
-rw-r--r--  tests/asyncio/__init__.py  0
-rw-r--r--  tests/asyncio/future/__init__.py  0
-rw-r--r--  tests/asyncio/future/test_async_future.py  228
-rw-r--r--  tests/asyncio/gapic/test_config_async.py  95
-rw-r--r--  tests/asyncio/gapic/test_method_async.py  248
-rw-r--r--  tests/asyncio/operations_v1/__init__.py  0
-rw-r--r--  tests/asyncio/operations_v1/test_operations_async_client.py  113
-rw-r--r--  tests/asyncio/test_grpc_helpers_async.py  589
-rw-r--r--  tests/asyncio/test_operation_async.py  201
-rw-r--r--  tests/asyncio/test_page_iterator_async.py  290
-rw-r--r--  tests/asyncio/test_retry_async.py  403
-rw-r--r--  tests/unit/__init__.py  0
-rw-r--r--  tests/unit/future/__init__.py  0
-rw-r--r--  tests/unit/future/test__helpers.py  37
-rw-r--r--  tests/unit/future/test_polling.py  242
-rw-r--r--  tests/unit/gapic/test_client_info.py  31
-rw-r--r--  tests/unit/gapic/test_config.py  94
-rw-r--r--  tests/unit/gapic/test_method.py  244
-rw-r--r--  tests/unit/gapic/test_routing_header.py  41
-rw-r--r--  tests/unit/operations_v1/__init__.py  0
-rw-r--r--  tests/unit/operations_v1/test_operations_client.py  98
-rw-r--r--  tests/unit/operations_v1/test_operations_rest_client.py  944
-rw-r--r--  tests/unit/test_bidi.py  869
-rw-r--r--  tests/unit/test_client_info.py  98
-rw-r--r--  tests/unit/test_client_options.py  117
-rw-r--r--  tests/unit/test_datetime_helpers.py  396
-rw-r--r--  tests/unit/test_exceptions.py  353
-rw-r--r--  tests/unit/test_grpc_helpers.py  860
-rw-r--r--  tests/unit/test_iam.py  382
-rw-r--r--  tests/unit/test_operation.py  326
-rw-r--r--  tests/unit/test_page_iterator.py  665
-rw-r--r--  tests/unit/test_path_template.py  389
-rw-r--r--  tests/unit/test_protobuf_helpers.py  518
-rw-r--r--  tests/unit/test_rest_helpers.py  77
-rw-r--r--  tests/unit/test_retry.py  458
-rw-r--r--  tests/unit/test_timeout.py  129
197 files changed, 23774 insertions, 0 deletions
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..d097511
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,13 @@
+[run]
+branch = True
+
+[report]
+fail_under = 100
+show_missing = True
+exclude_lines =
+ # Re-enable the standard pragma
+ pragma: NO COVER
+ # Ignore debug-only repr
+ def __repr__
+ # Ignore abstract methods
+ raise NotImplementedError
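
Note: the exclude_lines patterns above are matched against source lines, so any line containing one of them is dropped from the coverage report. A minimal sketch (not a file from this commit) showing what each pattern excludes:

    class Transport:
        def __repr__(self):  # excluded: matches "def __repr__"
            return "<Transport>"

        def send(self, request):
            # Excluded: matches "raise NotImplementedError".
            raise NotImplementedError

        def _debug_dump(self):
            print("debugging only")  # pragma: NO COVER
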
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..3da787c
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,11 @@
+[flake8]
+import-order-style=google
+# Note: this forces all google imports to be in the third group. See
+# https://github.com/PyCQA/flake8-import-order/issues/111
+application-import-names=google
+ignore = E203, E266, E501, W503
+exclude =
+ __pycache__,
+ .git,
+ *.pyc,
+ conf.py
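
Note: with import-order-style=google and application-import-names=google, flake8-import-order expects standard-library, third-party, and application import groups, with all `google` packages forced into the third group. A sketch of an ordering it would accept (pytest stands in as an arbitrary third-party dependency):

    import os
    import re

    import pytest

    from google.api_core import exceptions
    from google.api_core import retry
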
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 0000000..7519fa3
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,3 @@
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
new file mode 100644
index 0000000..c8b40cc
--- /dev/null
+++ b/.github/.OwlBot.yaml
@@ -0,0 +1,19 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+
+begin-after-commit-hash: 7af2cb8b2b725641ac0d07e2f256d453682802e6
+
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..ee81891
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,12 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
+
+# @googleapis/yoshi-python @googleapis/actools-python are the default owners for changes in this repo
+* @googleapis/yoshi-python @googleapis/actools-python
+
+# @googleapis/python-samples-owners @googleapis/actools-python are the default owners for samples changes
+/samples/ @googleapis/python-samples-owners @googleapis/actools-python
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 0000000..939e534
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..ee19f10
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,43 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+Please run down the following list and make sure you've tried the usual "quick fixes":
+
+ - Search the issues already opened: https://github.com/googleapis/python-api-core/issues
+ - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python
+
+If you are still having issues, please be sure to include as much information as possible:
+
+#### Environment details
+
+ - OS type and version:
+ - Python version: `python --version`
+ - pip version: `pip --version`
+ - `google-api-core` version: `pip show google-api-core`
+
+#### Steps to reproduce
+
+ 1. ?
+ 2. ?
+
+#### Code example
+
+```python
+# example
+```
+
+#### Stack trace
+```
+# example
+```
+
+Making sure to follow these steps will guarantee the quickest resolution possible.
+
+Thanks!
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..6365857
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,18 @@
+---
+name: Feature request
+about: Suggest an idea for this library
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+ **Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+ **Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+ **Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+ **Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md
new file mode 100644
index 0000000..9958690
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/support_request.md
@@ -0,0 +1,7 @@
+---
+name: Support request
+about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
+
+---
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..4ca8093
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
+Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
+- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-api-core/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
+- [ ] Ensure the tests and linter pass
+- [ ] Code coverage does not decrease (if any source code was changed)
+- [ ] Appropriate docs were updated (if necessary)
+
+Fixes #<issue_number_goes_here> 🦕
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000..6fe78aa
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+} \ No newline at end of file
diff --git a/.github/release-please.yml b/.github/release-please.yml
new file mode 100644
index 0000000..4507ad0
--- /dev/null
+++ b/.github/release-please.yml
@@ -0,0 +1 @@
+releaseType: python
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/.github/snippet-bot.yml
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
new file mode 100644
index 0000000..e621885
--- /dev/null
+++ b/.github/sync-repo-settings.yaml
@@ -0,0 +1,7 @@
+permissionRules:
+ - team: actools-python
+ permission: admin
+ - team: actools
+ permission: admin
+ - team: yoshi-python
+ permission: push
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..99c3a14
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,64 @@
+*.py[cod]
+*.sw[op]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+.eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+__pycache__
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.nox
+.cache
+.pytest_cache
+.pytype
+
+
+# Mac
+.DS_Store
+
+# JetBrains
+.idea
+
+# VS Code
+.vscode
+
+# emacs
+*~
+
+# Built documentation
+docs/_build
+bigquery/docs/generated
+docs.metadata
+
+# Virtual environment
+env/
+
+# Test logs
+coverage.xml
+*sponge_log.xml
+
+# System test environment variables.
+system_tests/local_test_setup
+
+# Make sure a generated file isn't accidentally committed.
+pylintrc
+pylintrc.test
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
new file mode 100755
index 0000000..0394c8a
--- /dev/null
+++ b/.kokoro/build.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-api-core"
+fi
+
+cd "${PROJECT_ROOT}"
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Setup service account credentials.
+export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+
+# Setup project id.
+export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+
+# Remove old nox
+python3 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
+
+# If NOX_SESSION is set, run only the specified session;
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3 -m nox -s ${NOX_SESSION:-}
+else
+ python3 -m nox
+fi
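
Note: build.sh delegates all real work to nox. The repository's actual noxfile.py (245 lines in the diffstat) is not shown in this excerpt; the sketch below is only a hypothetical illustration of what `python3 -m nox -s ${NOX_SESSION}` would select for a session named `unit`:

    import nox

    @nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10"])
    def unit(session):
        # Install the library plus test dependencies, then run the unit suite.
        session.install("-e", ".")
        session.install("pytest")
        session.run("pytest", "tests/unit")
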
diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg
new file mode 100644
index 0000000..9f2fa73
--- /dev/null
+++ b/.kokoro/continuous/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/build.sh"
+}
diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg
new file mode 100644
index 0000000..8f43917
--- /dev/null
+++ b/.kokoro/continuous/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 0000000..4e1b1fb
--- /dev/null
+++ b/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,67 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ python3-distutils \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.8 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+CMD ["python3.8"]
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
new file mode 100644
index 0000000..48e8985
--- /dev/null
+++ b/.kokoro/docs/common.cfg
@@ -0,0 +1,67 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/publish-docs.sh"
+}
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "docs-staging"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
+ # Cloud RAD bucket `docs-staging-v2`
+ value: "docs-staging-v2-staging"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "docuploader_service_account"
+ }
+ }
+} \ No newline at end of file
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 0000000..d1ed51e
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,28 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
new file mode 100644
index 0000000..8f43917
--- /dev/null
+++ b/.kokoro/docs/docs.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 0000000..f525142
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
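
Note: each secret named in SECRET_MANAGER_KEYS ends up as a plain file under ${KOKORO_GFILE_DIR}/secret_manager/. Later build steps read those files back; release.sh (further below) cats secret_manager/google-cloud-pypi-token. A hedged Python equivalent of that read, for illustration only:

    import os
    import pathlib

    secret_dir = pathlib.Path(os.environ["KOKORO_GFILE_DIR"]) / "secret_manager"
    # Command substitution in release.sh strips the trailing newline;
    # .strip() does the same here.
    pypi_token = (secret_dir / "google-cloud-pypi-token").read_text().strip()
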
diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg
new file mode 100644
index 0000000..9f2fa73
--- /dev/null
+++ b/.kokoro/presubmit/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
new file mode 100644
index 0000000..8f43917
--- /dev/null
+++ b/.kokoro/presubmit/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
new file mode 100755
index 0000000..8acb14e
--- /dev/null
+++ b/.kokoro/publish-docs.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+export PATH="${HOME}/.local/bin:${PATH}"
+
+# Install nox
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
+
+# build docs
+nox -s docs
+
+python3 -m pip install --user gcp-docuploader
+
+# create metadata
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
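
Note: both create-metadata calls read fields out of .repo-metadata.json with jq (the file is 12 lines in the diffstat but its contents are not shown here). A sketch of the same extraction in Python, using only the field names the script queries:

    import json

    with open(".repo-metadata.json") as f:
        meta = json.load(f)

    # jq's `.field // empty` prints nothing when the key is absent or null;
    # meta.get(field, "") approximates that with an empty string.
    for field in ("name", "language", "product_documentation", "repo", "issue_tracker"):
        print(f"{field}={meta.get(field, '')}")
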
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
new file mode 100755
index 0000000..0728ce1
--- /dev/null
+++ b/.kokoro/release.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Start the releasetool reporter
+python3 -m pip install gcp-releasetool
+python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
+
+# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
+python3 -m pip install --upgrade twine wheel setuptools
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Move into the package, build the distribution and upload.
+TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
+cd github/python-api-core
+python3 setup.py sdist bdist_wheel
+twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
new file mode 100644
index 0000000..586e764
--- /dev/null
+++ b/.kokoro/release/common.cfg
@@ -0,0 +1,30 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/release.sh"
+}
+
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+}
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
new file mode 100644
index 0000000..8f43917
--- /dev/null
+++ b/.kokoro/release/release.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 0000000..1a2b87b
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+} \ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
new file mode 100644
index 0000000..40fb8d8
--- /dev/null
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-310"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file
diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.10/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
new file mode 100644
index 0000000..a18c0cf
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
new file mode 100644
index 0000000..71cd1e5
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 0000000..3bb6b3a
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.6"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 0000000..7218af1
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 0000000..a18c0cf
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 0000000..71cd1e5
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 0000000..a3aa10b
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 0000000..a18c0cf
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 0000000..71cd1e5
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 0000000..20c941a
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 0000000..a18c0cf
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 0000000..71cd1e5
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 0000000..234887c
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 0000000..a18c0cf
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 0000000..71cd1e5
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+} \ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 0000000..ba3a707
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 0000000..8a324c9
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+ echo "No tests run. './samples/**/requirements.txt' not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use the secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
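
Note: the loop above runs `nox -s "$RUN_TESTS_SESSION"` inside every directory containing a requirements.txt, where RUN_TESTS_SESSION comes from the .kokoro/samples/*/common.cfg files (`lint`, `py-3.6` through `py-3.10`). A hypothetical samples noxfile.py with sessions named to match those values (the real samples noxfiles are not part of this excerpt):

    import nox

    @nox.session(name="py-3.6", python="3.6")
    def tests(session):
        # What a sample's session might do for RUN_TESTS_SESSION="py-3.6".
        session.install("-r", "requirements.txt")
        session.run("pytest")

    @nox.session(name="lint")
    def lint(session):
        session.install("flake8")
        session.run("flake8", ".")
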
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 0000000..11c042d
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ # Preserve the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
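+ # 'git describe --abbrev=0 --tags' prints the most recent tag reachable
+ # from HEAD, e.g. "v2.3.0" for this release.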
+ git checkout $LATEST_RELEASE
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ # Restore the test runner implementation if the release doesn't include it.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
+ fi
+fi
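+# (Illustrative: periodic Kokoro jobs include "periodic" in
+# KOKORO_BUILD_ARTIFACTS_SUBDIR, e.g. ".../python-api-core/periodic/...",
+# which is what the glob match above tests for.)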
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
new file mode 100755
index 0000000..f39236e
--- /dev/null
+++ b/.kokoro/trampoline.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
+
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 0000000..4af6cdc
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker container with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after a
+# successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# There may also be repo-specific envvars in .trampolinerc at
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [[ "$(LC_ALL=C type -t "$1")" == "function" ]]
+}
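+# Used at the end of this script to call the optional
+# 'trampoline_after_upload_hook' that a repo can define in .trampolinerc, e.g.:
+#   function_exists trampoline_after_upload_hook && trampoline_after_upload_hook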
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
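+# Illustrative output of these helpers (timestamp and colors will vary):
+#   ================================================================
+#   2022-01-05T04:25:23Z: Building with Trampoline 2.0.5
+#   ================================================================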
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI system we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars are passed down to the
+# container so scripts can tell which CI system they're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # Descriptive env var indicating we're on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For FlakyBot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Find the repository root (the directory that contains '.git').
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
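+# Illustrative: repo_root "/tmpfs/src/github/python-api-core/.kokoro" takes
+# dirname repeatedly until a directory containing '.git' is found, echoing
+# "/tmpfs/src/github/python-api-core" (the path here is hypothetical).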
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there; otherwise, traverse from `pwd`
+# to find the `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support a relative path in `TRAMPOLINE_SERVICE_ACCOUNT`, we use it
+# from `PROJECT_ROOT` (which we just changed into above).
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
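+# e.g. a legacy value "github/repo-name/.kokoro/run_tests.sh" becomes
+# ".kokoro/run_tests.sh".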
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# Ignore errors from docker operations and test execution.
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory, we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group on
+# the host OS.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
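+# Illustrative: a group entry "docker:x:999:kbuilder" makes docker_gid=999
+# (the third ':'-separated field; values here are hypothetical).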
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+ # On CI systems, we suppress the docker build logs and only
+ # output them when the build fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+ # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+ # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the build script with the current user id. Because the container
+ # gets to write in ${PWD}, you typically want this to be your user id.
+ # To allow docker in docker, we need to use the docker gid from the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount /tmp so that docker in docker can mount files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
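+# A sketch (uid/gid values hypothetical) of the eventual invocation, once the
+# entrypoint is appended below for the no-arguments case:
+#   docker run --rm --network=host --privileged --user 1000:999 \
+#     --volume "${PROJECT_ROOT}:/workspace" --workdir /workspace \
+#     --entrypoint=".kokoro/build.sh" "${TRAMPOLINE_IMAGE}"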
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Pass the allow-listed env vars down into the container.
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, they become the command run in the
+# container; otherwise, run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..62eb5a7
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,31 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.0.1
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+- repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+- repo: https://gitlab.com/pycqa/flake8
+ rev: 3.9.2
+ hooks:
+ - id: flake8
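+# To use these hooks locally, run `pre-commit install` once in the repo;
+# the hooks then run on every `git commit`, and `pre-commit run --all-files`
+# checks the entire tree.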
diff --git a/.repo-metadata.json b/.repo-metadata.json
new file mode 100644
index 0000000..e16c9d2
--- /dev/null
+++ b/.repo-metadata.json
@@ -0,0 +1,12 @@
+{
+ "name": "google-api-core",
+ "name_pretty": "Google API client core library",
+ "client_documentation": "https://googleapis.dev/python/google-api-core/latest",
+ "release_level": "ga",
+ "language": "python",
+ "library_type": "CORE",
+ "repo": "googleapis/python-api-core",
+ "distribution_name": "google-api-core",
+ "default_version": "",
+ "codeowner_team": "@googleapis/actools-python"
+}
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 0000000..0eee72a
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,63 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+)
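+# e.g. (illustrative): required_envvars+=( "BUILD_SPECIFIC_GCLOUD_PROJECT" )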
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..b821a42
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,33 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+python_library {
+ name: "py-python-api-core",
+ host_supported: true,
+ srcs: [
+ "google/api_core/future/*.py",
+ "google/api_core/gapic_v1/*.py",
+ "google/api_core/operations_v1/*.py",
+ "google/api_core/operations_v1/transports/*.py",
+ "google/api_core/*.py",
+ ],
+ version: {
+ py2: {
+ enabled: true,
+ },
+ py3: {
+ enabled: true,
+ },
+ },
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..04d47da
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,726 @@
+# Changelog
+
+[PyPI History][1]
+
+[1]: https://pypi.org/project/google-api-core/#history
+
+## [2.3.0](https://www.github.com/googleapis/python-api-core/compare/v2.2.2...v2.3.0) (2021-11-25)
+
+
+### Features
+
+* add operations rest client to support long-running operations. ([#311](https://www.github.com/googleapis/python-api-core/issues/311)) ([ce1adf3](https://www.github.com/googleapis/python-api-core/commit/ce1adf395982ede157c0f25a920946bb52789873))
+
+
+### Bug Fixes
+
+* handle bare 'grpc.Call' in 'from_grpc_error' ([#298](https://www.github.com/googleapis/python-api-core/issues/298)) ([060b339](https://www.github.com/googleapis/python-api-core/commit/060b339e3af296dd1772bfc1b4a0d2b4264cae1f))
+
+### [2.2.2](https://www.github.com/googleapis/python-api-core/compare/v2.2.1...v2.2.2) (2021-11-02)
+
+
+### Bug Fixes
+
+* make 'gapic_v1.method.DEFAULT' a typed object ([#292](https://www.github.com/googleapis/python-api-core/issues/292)) ([ffc51f0](https://www.github.com/googleapis/python-api-core/commit/ffc51f03c7ce5d9f009ba859b8df385d52925578))
+
+### [2.2.1](https://www.github.com/googleapis/python-api-core/compare/v2.2.0...v2.2.1) (2021-10-26)
+
+
+### Bug Fixes
+
+* revert "fix: do not error on LROs with no response or error" ([#294](https://www.github.com/googleapis/python-api-core/issues/294)) ([9e6091e](https://www.github.com/googleapis/python-api-core/commit/9e6091ee59a30e72a6278b369f6a08e7aef32f22))
+
+## [2.2.0](https://www.github.com/googleapis/python-api-core/compare/v2.1.1...v2.2.0) (2021-10-25)
+
+
+### Features
+
+* add 'GoogleAPICallError.error_details' property ([#286](https://www.github.com/googleapis/python-api-core/issues/286)) ([ef6f0fc](https://www.github.com/googleapis/python-api-core/commit/ef6f0fcfdfe771172056e35e3c990998b3b00416))
+
+### [2.1.1](https://www.github.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1) (2021-10-13)
+
+
+### Bug Fixes
+
+* add mypy checking + 'py.typed' file ([#290](https://www.github.com/googleapis/python-api-core/issues/290)) ([0023ee1](https://www.github.com/googleapis/python-api-core/commit/0023ee1fe0e8b80c7a9e8987e0f322a829e5d613))
+
+## [2.1.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.1...v2.1.0) (2021-10-05)
+
+
+### Features
+
+* add grpc transcoding + tests ([#259](https://www.github.com/googleapis/python-api-core/issues/259)) ([afe0fa1](https://www.github.com/googleapis/python-api-core/commit/afe0fa14c21289c8244606a9f81544cff8ac5f7c))
+* Add helper function to format query_params for rest transport. ([#275](https://www.github.com/googleapis/python-api-core/issues/275)) ([1c5eb4d](https://www.github.com/googleapis/python-api-core/commit/1c5eb4df93d78e791082d9282330ebf0faacd222))
+* add support for Python 3.10 ([#284](https://www.github.com/googleapis/python-api-core/issues/284)) ([a422a5d](https://www.github.com/googleapis/python-api-core/commit/a422a5d72cb6f363d57e7a4effe421ba8e049cde))
+
+### [2.0.1](https://www.github.com/googleapis/python-api-core/compare/v2.0.0...v2.0.1) (2021-08-31)
+
+
+### Bug Fixes
+
+* do not error on LROs with no response or error ([#258](https://www.github.com/googleapis/python-api-core/issues/258)) ([618f192](https://www.github.com/googleapis/python-api-core/commit/618f19201af729205892fcecd9c8e315ba3174a3))
+
+## [2.0.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.0-b1...v2.0.0) (2021-08-18)
+
+### ⚠ BREAKING CHANGES
+
+* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9))
+
+### Bug Fixes
+
+* bump grpcio version to use stable aio API ([#234](https://www.github.com/googleapis/python-api-core/issues/234)) ([bdbf889](https://www.github.com/googleapis/python-api-core/commit/bdbf889210b709d7c1945f2160bcba9161b4dd2e))
+* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
+
+## [2.0.0b1](https://www.github.com/googleapis/python-api-core/compare/v1.31.1...v2.0.0b1) (2021-08-03)
+
+
+### ⚠ BREAKING CHANGES
+
+* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9))
+
+### Bug Fixes
+
+* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
+
+### [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26)
+
+
+### Bug Fixes
+
+* add 'requests.exceptions.ChunkedEncodingError' to retryable exceptions ([#237](https://www.github.com/googleapis/python-api-core/issues/237)) ([5e540f2](https://www.github.com/googleapis/python-api-core/commit/5e540f28493cc3e13260458a8d1c6a1abb2ed313))
+
+
+### Documentation
+
+* add Samples section to CONTRIBUTING.rst ([#229](https://www.github.com/googleapis/python-api-core/issues/229)) ([a12c051](https://www.github.com/googleapis/python-api-core/commit/a12c0516c42918b05415835029717814353b883b))
+
+## [1.31.0](https://www.github.com/googleapis/python-api-core/compare/v1.30.0...v1.31.0) (2021-07-07)
+
+
+### Features
+
+* add ServiceUnavailable exception to polling retries ([#184](https://www.github.com/googleapis/python-api-core/issues/184)) ([11032cf](https://www.github.com/googleapis/python-api-core/commit/11032cf08ecc16dd252a6cda8b33b0b28ec4f4ba))
+
+
+### Bug Fixes
+
+* undeprecate entity factory helpers ([#101](https://www.github.com/googleapis/python-api-core/issues/101)) ([1fbee03](https://www.github.com/googleapis/python-api-core/commit/1fbee03495a136eef3d6aaa5ea0aadd6e4b58e8b)), closes [#100](https://www.github.com/googleapis/python-api-core/issues/100)
+
+## [1.30.0](https://www.github.com/googleapis/python-api-core/compare/v1.29.0...v1.30.0) (2021-06-08)
+
+
+### Features
+
+* add iterator capability to paged iterators ([#200](https://www.github.com/googleapis/python-api-core/issues/200)) ([3487d68](https://www.github.com/googleapis/python-api-core/commit/3487d68bdab6f20e2ab931c8283f63c94862cf31))
+
+## [1.29.0](https://www.github.com/googleapis/python-api-core/compare/v1.28.0...v1.29.0) (2021-06-02)
+
+
+### Features
+
+* HTTPIterator now accepts a page_size parameter to control page … ([#197](https://www.github.com/googleapis/python-api-core/issues/197)) ([a421913](https://www.github.com/googleapis/python-api-core/commit/a4219137a5bfcf2a6f44780ecdbf475c1129e461))
+
+
+### Documentation
+
+* fix broken links in multiprocessing.rst ([#195](https://www.github.com/googleapis/python-api-core/issues/195)) ([8d8bc51](https://www.github.com/googleapis/python-api-core/commit/8d8bc5150ee5543b4aeb2c271da034a5305d1436))
+
+## [1.28.0](https://www.github.com/googleapis/python-api-core/compare/v1.27.0...v1.28.0) (2021-05-20)
+
+
+### Bug Fixes
+
+* require google-auth>=1.25.0 ([#190](https://www.github.com/googleapis/python-api-core/issues/190)) ([155da5e](https://www.github.com/googleapis/python-api-core/commit/155da5e18cc2fdcfa57de6f956b7d078e79cd4b7))
+
+
+### Miscellaneous Chores
+
+* release 1.28.0 ([#192](https://www.github.com/googleapis/python-api-core/issues/192)) ([11b5da4](https://www.github.com/googleapis/python-api-core/commit/11b5da426a842541ca2b861d3387fc312b3f5b60))
+
+## [1.27.0](https://www.github.com/googleapis/python-api-core/compare/v1.26.3...v1.27.0) (2021-05-18)
+
+
+### Features
+
+* Add support for `rest/` token in `x-goog-api-client` header ([#189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124))
+* retry google.auth TransportError and requests ConnectionError ([#178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334))
+
+### [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
+
+
+### Bug Fixes
+
+* skip empty policy bindings in `len()` and `iter()` ([#159](https://www.github.com/googleapis/python-api-core/issues/159)) ([9eaa786](https://www.github.com/googleapis/python-api-core/commit/9eaa7868164a7e98792de24d2be97f79fba22322))
+
+
+### Documentation
+
+* update python contributing guide ([#147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac))
+
+### [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
+
+
+### Bug Fixes
+
+* save empty IAM policy bindings ([#155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f))
+
+### [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
+
+
+### Bug Fixes
+
+* add operation name to x-goog-request-params in async client ([#137](https://www.github.com/googleapis/python-api-core/issues/137)) ([7271b23](https://www.github.com/googleapis/python-api-core/commit/7271b23afddb032e49e957525704d0cd5bfa4c65))
+
+## [1.26.0](https://www.github.com/googleapis/python-api-core/compare/v1.25.1...v1.26.0) (2021-02-08)
+
+
+### Features
+
+* allow default_host and default_scopes to be passed to create_channel ([#134](https://www.github.com/googleapis/python-api-core/issues/134)) ([94c76e0](https://www.github.com/googleapis/python-api-core/commit/94c76e0873e5b2f42331d5b1ad286c1e63b61395))
+
+### [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
+
+
+### Bug Fixes
+
+* add operation name to x-goog-request-params ([#133](https://www.github.com/googleapis/python-api-core/issues/133)) ([97cef4a](https://www.github.com/googleapis/python-api-core/commit/97cef4ad1db55938715f9ac8000d1b0ad1e71873))
+
+
+### Documentation
+
+* fix spelling errors in retry ([#131](https://www.github.com/googleapis/python-api-core/issues/131)) ([232dab0](https://www.github.com/googleapis/python-api-core/commit/232dab0ad3ef2cca0edfe707d8f90ca0ea200ba2))
+
+## [1.25.0](https://www.github.com/googleapis/python-api-core/compare/v1.24.1...v1.25.0) (2021-01-14)
+
+
+### Features
+
+* allow gRPC metadata to be passed to operations client ([#127](https://www.github.com/googleapis/python-api-core/issues/127)) ([73854e8](https://www.github.com/googleapis/python-api-core/commit/73854e897b885e9be290f2676a8a1466b4f041e4))
+
+
+### Documentation
+
+* **python:** document adding Python 3.9 support, dropping 3.5 support ([#120](https://www.github.com/googleapis/python-api-core/issues/120)) ([b51b7f5](https://www.github.com/googleapis/python-api-core/commit/b51b7f587042fe9340371c1b5c8e9adf8001c43a)), closes [#787](https://www.github.com/googleapis/python-api-core/issues/787)
+
+### [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
+
+
+### Bug Fixes
+
+* support 'retry' for ops built from HTTP/gRPC responses ([#115](https://www.github.com/googleapis/python-api-core/issues/115)) ([7a38243](https://www.github.com/googleapis/python-api-core/commit/7a38243c351b228d103eee81fc5ae521ad1c930e)), closes [#87](https://www.github.com/googleapis/python-api-core/issues/87)
+
+## [1.24.0](https://www.github.com/googleapis/python-api-core/compare/v1.23.0...v1.24.0) (2020-12-14)
+
+
+### Features
+
+* add support for Python 3.9, drop support for Python 3.5 ([#111](https://www.github.com/googleapis/python-api-core/issues/111)) ([fdbed0f](https://www.github.com/googleapis/python-api-core/commit/fdbed0f0cbae8de21c73338a6817f8aa79cef4c9)), closes [#110](https://www.github.com/googleapis/python-api-core/issues/110)
+
+
+### Documentation
+
+* explain how to create credentials from dict ([#109](https://www.github.com/googleapis/python-api-core/issues/109)) ([5dce6d6](https://www.github.com/googleapis/python-api-core/commit/5dce6d61e7324a415c1b3ceaeec1ce1b5f1ea189))
+
+## [1.23.0](https://www.github.com/googleapis/python-api-core/compare/v1.22.4...v1.23.0) (2020-10-16)
+
+
+### Features
+
+* **api-core:** pass retry from result() to done() ([#9](https://www.github.com/googleapis/python-api-core/issues/9)) ([6623b31](https://www.github.com/googleapis/python-api-core/commit/6623b31a2040b834be808d711fa397dc428f1837))
+
+
+### Bug Fixes
+
+* map LRO errors to library exception types ([#86](https://www.github.com/googleapis/python-api-core/issues/86)) ([a855339](https://www.github.com/googleapis/python-api-core/commit/a85533903c57be4809fe76435e298409e0903931)), closes [#15](https://www.github.com/googleapis/python-api-core/issues/15)
+* harden install to use full paths, and windows separators on windows ([#88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34))
+* update out-of-date comment in exceptions.py ([#93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265))
+
+### [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
+
+
+### Bug Fixes
+
+* use version.py instead of pkg_resources.get_distribution ([#80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47))
+
+### [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
+
+
+### Bug Fixes
+
+* **deps:** require six >= 1.13.0 ([#78](https://www.github.com/googleapis/python-api-core/issues/78)) ([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31)
+
+### [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
+
+
+### Bug Fixes
+
+* only add quota project id if supported ([#75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62))
+
+### [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
+
+
+### Documentation
+
+* fix spelling errors for amount in retry ([#69](https://www.github.com/googleapis/python-api-core/issues/69)) ([7bb713d](https://www.github.com/googleapis/python-api-core/commit/7bb713d13b1fe3cca58263f5e499136a84abc456))
+
+## [1.22.0](https://www.github.com/googleapis/python-api-core/compare/v1.21.0...v1.22.0) (2020-07-21)
+
+
+### Features
+
+* allow quota project to be passed to create_channel ([#58](https://www.github.com/googleapis/python-api-core/issues/58)) ([e2d9a7b](https://www.github.com/googleapis/python-api-core/commit/e2d9a7b209b7dfab300dc848fabbae8f42a2ab19))
+
+
+### Bug Fixes
+
+* _determine_timeout problem handling float type timeout ([#64](https://www.github.com/googleapis/python-api-core/issues/64)) ([2010373](https://www.github.com/googleapis/python-api-core/commit/2010373b27536d1191175624b297a709d70153fa))
+
+
+### Documentation
+
+* change the documentation for using 'six.moves.collections_abc.Mapping' instead of 'dict' in 'client_options.from_dict()' ([#53](https://www.github.com/googleapis/python-api-core/issues/53)) ([c890675](https://www.github.com/googleapis/python-api-core/commit/c890675dc9ebc084f105be81dc81c048f4f599ea))
+
+## [1.21.0](https://www.github.com/googleapis/python-api-core/compare/v1.20.1...v1.21.0) (2020-06-18)
+
+
+### Features
+
+* allow credentials files to be passed for channel creation ([#50](https://www.github.com/googleapis/python-api-core/issues/50)) ([ded92d0](https://www.github.com/googleapis/python-api-core/commit/ded92d0acdcde4295d0e5df05fda0d83783a3991))
+
+### [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
+
+
+### Bug Fixes
+
+* **dependencies:** increase protobuf version ([#49](https://www.github.com/googleapis/python-api-core/issues/49)) ([1ba6095](https://www.github.com/googleapis/python-api-core/commit/1ba609592968c9d828449b89a3ade3bcaf5edd7f)), closes [#48](https://www.github.com/googleapis/python-api-core/issues/48)
+
+## [1.20.0](https://www.github.com/googleapis/python-api-core/compare/v1.19.1...v1.20.0) (2020-06-09)
+
+
+### Features
+
+* allow disabling response stream pre-fetch ([#30](https://www.github.com/googleapis/python-api-core/issues/30)) ([74e0b0f](https://www.github.com/googleapis/python-api-core/commit/74e0b0f8387207933c120af15b2bb5d175dd8f84)), closes [#25](https://www.github.com/googleapis/python-api-core/issues/25)
+
+### [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
+
+
+### Bug Fixes
+
+* bump up grpcio minimum version to 1.29.0 ([#41](https://www.github.com/googleapis/python-api-core/issues/41)) ([4b11422](https://www.github.com/googleapis/python-api-core/commit/4b114221b3ae01eee540bedf47381c3b7c214b0c))
+
+## [1.19.0](https://www.github.com/googleapis/python-api-core/compare/v1.18.0...v1.19.0) (2020-06-05)
+
+
+### Features
+
+* **client_options:** add new client options 'quota_project_id', 'scopes', and 'credentials_file' ([a582936](https://www.github.com/googleapis/python-api-core/commit/a58293601d6da90c499d404e634a979a6cae9708))
+
+## [1.18.0](https://www.github.com/googleapis/python-api-core/compare/v1.17.0...v1.18.0) (2020-06-04)
+
+
+### Features
+
+* [CBT-6 helper] Exposing Retry._deadline as a property ([#20](https://www.github.com/googleapis/python-api-core/issues/20)) ([7be1e59](https://www.github.com/googleapis/python-api-core/commit/7be1e59e9d75c112f346d2b76dce3dd60e3584a1))
+* add client_encrypted_cert_source to ClientOptions ([#31](https://www.github.com/googleapis/python-api-core/issues/31)) ([e4eaec0](https://www.github.com/googleapis/python-api-core/commit/e4eaec0ff255114138d3715280f86d34d861a6fa))
+* AsyncIO Integration [Part 2] ([#28](https://www.github.com/googleapis/python-api-core/issues/28)) ([dd9b2f3](https://www.github.com/googleapis/python-api-core/commit/dd9b2f38a70e85952cc05552ec8070cdf29ddbb4)), closes [#23](https://www.github.com/googleapis/python-api-core/issues/23)
+* First batch of AIO integration ([#26](https://www.github.com/googleapis/python-api-core/issues/26)) ([a82f289](https://www.github.com/googleapis/python-api-core/commit/a82f2892b8f219b82e120e6ed9f4070869c28be7))
+* third batch of AsyncIO integration ([#29](https://www.github.com/googleapis/python-api-core/issues/29)) ([7d8d580](https://www.github.com/googleapis/python-api-core/commit/7d8d58075a92e93662747d36a2d55b5e9f0943e1))
+
+## [1.17.0](https://www.github.com/googleapis/python-api-core/compare/v1.16.0...v1.17.0) (2020-04-14)
+
+
+### Features
+
+* **api_core:** add retry param into PollingFuture() and it's inheritors ([#9923](https://www.github.com/googleapis/python-api-core/issues/9923)) ([14f1f34](https://www.github.com/googleapis/python-api-core/commit/14f1f34e013c90fed2da2918625083d299fda557)), closes [#6197](https://www.github.com/googleapis/python-api-core/issues/6197)
+* **api-core:** add client_cert_source to ClientOptions ([#17](https://www.github.com/googleapis/python-api-core/issues/17)) ([748c935](https://www.github.com/googleapis/python-api-core/commit/748c935d4cf03a1f04fba9139c3c3150fd694d88))
+
+
+### Bug Fixes
+
+* consume part of StreamingResponseIterator to support failure while under a retry context ([#10206](https://www.github.com/googleapis/python-api-core/issues/10206)) ([2b103b6](https://www.github.com/googleapis/python-api-core/commit/2b103b60ece16a1e1bc98cfda7ec375191a90f75))
+
+## 1.16.0
+
+01-13-2020 14:19 PST
+
+### New Features
+
+- feat(storage): support optionsRequestedPolicyVersion ([#9989](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9989))
+- feat(api_core): support version 3 policy bindings ([#9869](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9869))
+
+## 1.15.0
+
+12-16-2019 15:27 PST
+
+### New Features
+- Make the last retry happen at deadline. ([#9873](https://github.com/googleapis/google-cloud-python/pull/9873))
+- Add a repr method for ClientOptions. ([#9849](https://github.com/googleapis/google-cloud-python/pull/9849))
+- Simplify `from_rfc3339` methods. ([#9641](https://github.com/googleapis/google-cloud-python/pull/9641))
+- Provide a `raw_page` field for `page_iterator.Page`. ([#9486](https://github.com/googleapis/google-cloud-python/pull/9486))
+
+### Documentation
+- Add Python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036))
+- Remove references to the old authentication credentials. ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456))
+
+## 1.14.3
+
+10-07-2019 10:35 PDT
+
+
+### Implementation Changes
+- Finalize during close of 'ResumableBidiRpc' ([#9337](https://github.com/googleapis/google-cloud-python/pull/9337))
+- add on_error to Retry.__init__ ([#8892](https://github.com/googleapis/google-cloud-python/pull/8892))
+- Fix race in 'BackgroundConsumer._thread_main'. ([#8883](https://github.com/googleapis/google-cloud-python/pull/8883))
+
+### Documentation
+- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294))
+- Fix broken links in docs. ([#9148](https://github.com/googleapis/google-cloud-python/pull/9148))
+- About of time -> amount of time ([#9052](https://github.com/googleapis/google-cloud-python/pull/9052))
+- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035))
+
+### Internal / Testing Changes
+- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085))
+
+## 1.14.2
+
+07-30-2019 14:08 PDT
+
+
+### Documentation
+- Add client_options documentation. ([#8834](https://github.com/googleapis/google-cloud-python/pull/8834))
+
+## 1.14.1
+
+07-30-2019 12:24 PDT
+
+
+### Implementation Changes
+- Remove error log entry on clean BiDi shutdown. ([#8806](https://github.com/googleapis/google-cloud-python/pull/8806))
+- Forward 'timeout' arg from 'exception' to `_blocking_poll`. ([#8735](https://github.com/googleapis/google-cloud-python/pull/8735))
+
+### Documentation
+- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805))
+- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705))
+
+## 1.14.0
+
+07-17-2019 13:16 PDT
+
+
+### New Features
+- Firestore: Add `should_terminate` predicate for clean BiDi shutdown. ([#8650](https://github.com/googleapis/google-cloud-python/pull/8650))
+
+### Dependencies
+- Update pins of 'googleapis-common-protos'. ([#8688](https://github.com/googleapis/google-cloud-python/pull/8688))
+
+### Documentation
+- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288))
+
+### Internal / Testing Changes
+- All: Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464))
+
+## 1.13.0
+
+06-24-2019 10:34 PDT
+
+### New Features
+- Add `client_options.ClientOptions` object. ([#8265](https://github.com/googleapis/google-cloud-python/pull/8265))
+
+## 1.12.0
+
+06-18-2019 12:37 PDT
+
+
+### New Features
+- Add Throttling to Bidi Reopening. Mitigates ResumableBidiRpc consuming 100% CPU ([#8193](https://github.com/googleapis/google-cloud-python/pull/8193))
+
+## 1.11.1
+
+05-28-2019 11:19 PDT
+
+
+### Implementation Changes
+- Classify 503 Service Unavailable errors as transient. ([#8182](https://github.com/googleapis/google-cloud-python/pull/8182))
+
+### Dependencies
+- Pin `grpcio < 2.0dev`. ([#8182](https://github.com/googleapis/google-cloud-python/pull/8182))
+
+### Internal / Testing Changes
+- Add parameterized test for `from_rfc3339` with nanos ([#7675](https://github.com/googleapis/google-cloud-python/pull/7675))
+- Unbreak pytype by silencing a false positive. ([#8106](https://github.com/googleapis/google-cloud-python/pull/8106))
+
+## 1.11.0
+
+05-15-2019 10:29 PDT
+
+### New Features
+
+- Refactor 'client_info' support. ([#7849](https://github.com/googleapis/google-cloud-python/pull/7849))
+
+## 1.10.0
+
+04-29-2019 10:12 PDT
+
+### Implementation Changes
+
+- Append leading zeros for nanosecond precision DateTimes
+ ([#7663](https://github.com/googleapis/google-cloud-python/pull/7663))
+
+### New Features
+
+- Add `user_agent` property to `ClientInfo`
+ ([#7799](https://github.com/googleapis/google-cloud-python/pull/7799))
+
+## 1.9.0
+
+04-05-2019 10:38 PDT
+
+
+### Implementation Changes
+- Allow passing metadata as part of creating a bidi ([#7514](https://github.com/googleapis/google-cloud-python/pull/7514))
+
+### Internal / Testing Changes
+- Update setup.py
+- API Core: specify a pytype output directory in setup.cfg. ([#7639](https://github.com/googleapis/google-cloud-python/pull/7639))
+
+## 1.8.2
+
+03-22-2019 16:27 PDT
+
+
+### Implementation Changes
+- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535))
+
+### Internal / Testing Changes
+- When re-opening a `ResumableBidiRPC` set `_request_queue_generator` to `None`. ([#7548](https://github.com/googleapis/google-cloud-python/pull/7548))
+
+## 1.8.1
+
+03-12-2019 12:45 PDT
+
+### Implementation Changes
+- Protect the creation of a background thread in BackgroundConsumer and wait on it starting. ([#7499](https://github.com/googleapis/google-cloud-python/pull/7499))
+
+## 1.8.0
+
+02-23-2019 15:46 PST
+
+
+### New Features
+- Add support to unwrap Anys into wrapped pb2 objects. ([#7430](https://github.com/googleapis/google-cloud-python/pull/7430))
+- Add `Operation.deserialize`. ([#7427](https://github.com/googleapis/google-cloud-python/pull/7427))
+
+### Documentation
+- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307))
+
+### Internal / Testing Changes
+- Fix new lint failure. ([#7382](https://github.com/googleapis/google-cloud-python/pull/7382))
+
+## 1.7.0
+
+12-17-2018 13:56 PST
+
+### New Features
+- Support converting `DatetimeWithNanos` to / from `google.protobuf.timestamp_pb2.Timestamp`. ([#6919](https://github.com/googleapis/google-cloud-python/pull/6919))
+
+### Documentation
+- Document Python 2 deprecation. ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910))
+- Add usage example for `google.api_core.iam.Policy`. ([#6855](https://github.com/googleapis/google-cloud-python/pull/6855))
+
+### Internal / Testing Changes
+- Work around pytype bug for `ABCMeta.register`. ([#6873](https://github.com/googleapis/google-cloud-python/pull/6873))
+
+## 1.6.0
+
+11-30-2018 12:45 PST
+
+
+### Implementation Changes
+- Import stdlib ABCs from 'collections.abc' rather than 'collections'. ([#6451](https://github.com/googleapis/google-cloud-python/pull/6451))
+
+### New Features
+- Move google.cloud.iam (core) to google.api_core.iam ([#6740](https://github.com/googleapis/google-cloud-python/pull/6740))
+- Add bidi support to api_core. ([#6191](https://github.com/googleapis/google-cloud-python/pull/6191))
+
+### Documentation
+- Fix typo ([#6532](https://github.com/googleapis/google-cloud-python/pull/6532))
+
+### Internal / Testing Changes
+- blacken api_core and core ([#6668](https://github.com/googleapis/google-cloud-python/pull/6668))
+
+## 1.5.2
+
+11-09-2018 14:22 PST
+
+
+### Implementation Changes
+- Retry transient errors in 'PollingFuture.result'. ([#6305](https://github.com/googleapis/google-cloud-python/pull/6305))
+
+### Dependencies
+- Remove hyphen from named extra in api_core. ([#6468](https://github.com/googleapis/google-cloud-python/pull/6468))
+- Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391))
+- Avoid broken 'google-common-apis 1.5.4' release. ([#6355](https://github.com/googleapis/google-cloud-python/pull/6355))
+
+## 1.5.1
+
+10-29-2018 13:29 PDT
+
+### Implementation Changes
+- Don't URL-encode slashes in gRPC request headers. ([#6310](https://github.com/googleapis/google-cloud-python/pull/6310))
+
+### Internal / Testing Changes
+- Back out changes from [#6267](https://github.com/googleapis/google-cloud-python/pull/6267) / `api_core-1.6.0a1` release. ([#6328](https://github.com/googleapis/google-cloud-python/pull/6328))
+
+## 1.5.0
+
+### New Features
+- Add bidi, Bidirectional Streaming, to api-core ([#6211](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6211))
+
+### Internal / Testing Changes
+- Use new Nox ([#6175](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6175))
+
+## 1.4.1
+
+### Dependencies
+- Pin minimum protobuf dependency to 3.4.0. ([#6132](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6132))
+
+### Internal / Testing Changes
+- Add type-checking via pytype to api_core. ([#6116](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6116))
+
+## 1.4.0
+
+### Dependencies
+
+- Add support for gRPC connection management (available when using optional grpc_gcp dependency) ([#5553](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5553)) ([#5904](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5904))
+- Update classifiers to drop Python 3.4 and add Python 3.7 ([#5702](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5702))
+
+## 1.3.0
+
+### New Features
+
+- Add protobuf_helpers.field_mask to calculate a field mask from two messages (#5320)
+
+## 1.2.1
+
+### Implementation Changes
+- Make client_info work without gRPC installed. (#5075)
+- Rename `x-goog-header-params` to `x-goog-request-params` (#5495)
+
+## 1.2.0
+
+### Implementation Changes
+- Add close method to grpc Channel (#5333)
+
+### Internal / Testing Changes
+- Fix tests after grpcio update (#5333)
+- Add Test runs for Python 3.7 and remove 3.4 (#5295)
+
+## 1.1.2
+
+### Packaging
+- Update setuptools before packaging (#5265)
+
+## 1.1.1
+
+### Internal / Testing Changes
+- Use `install_requires` for platform dependencies instead of `extras_require` (#4991)
+- Update trove classifier to '5 - Production/Stable'
+
+## 1.1.0
+
+### Interface additions
+
+- Add `datetime_helpers.DatetimeWithNanoSeconds` (#4979)
+
+### Implementation changes
+
+- Use a class to wrap grpc streaming errors instead of monkey-patching (#4995)
+
+## 1.0.0
+
+This is the stable v1.0.0 release of google-api-core for Python. Releases after
+this will not contain breaking changes.
+
+### Interface changes and additions
+
+- Made `api_core.page_iterator.PageIterator.item_to_value` public
+- Added ability to specify retry for `Operation` and `polling.Future`. (#4922)
+
+## 0.1.4
+
+### New Features
+
+- Add `ChannelStub` to `grpc_helpers` for testing gRPC-based clients. (#4705)
+
+### Notable Implementation Changes
+
+- Fix handling of gapic metadata when specified as `None`. (#4701)
+
+## 0.1.3
+
+### Notable Implementation Changes
+
+- Apply scopes to explicitly provided credentials if needed (#4594).
+- Removing `google.api_core.gapic_v1.method.METRICS_METADATA_KEY`. It
+ can be accessed via
+ `google.api_core.gapic_v1.client_info.METRICS_METADATA_KEY` (#4588).
+
+### Dependencies
+
+- Upgrading to latest `grpcio==1.8.2` (#4642). For details, see
+ related gRPC [bug](https://github.com/grpc/grpc/issues/9688)
+ and [fix](https://github.com/grpc/grpc/pull/13665).
+
+PyPI: https://pypi.org/project/google-api-core/0.1.3/
+
+## 0.1.2
+
+- Upgrading `concurrent.futures` backport from `>= 3.0.0`
+ to `>= 3.2.0` (#4521).
+- Moved `datetime`-related helpers from `google.cloud.core` to
+ `google.api_core.datetime_helpers` (#4399).
+- Added missing `client_info` to `gapic_v1/__init__.py`'s
+ `__all__` (#4567).
+- Added helpers for routing headers to `gapic_v1` (#4336).
+
+PyPI: https://pypi.org/project/google-api-core/0.1.2/
+
+## 0.1.1
+
+### Dependencies
+
+- Upgrading `grpcio` dependency from `1.2.0, < 1.6dev` to `>= 1.7.0` (#4280)
+
+PyPI: https://pypi.org/project/google-api-core/0.1.1/
+
+## 0.1.0
+
+Initial release
+
+Prior to being separated, this package was developed in `google-cloud-core`, so
+relevant changes from that package are included here.
+
+- Add google.api.core.gapic_v1.config (#4022)
+- Add google.api.core.helpers.grpc_helpers (#4041)
+- Add google.api.core.gapic_v1.method (#4057)
+- Add wrap_with_paging (#4067)
+- Add grpc_helpers.create_channel (#4069)
+- Add DEFAULT sentinel for gapic_v1.method (#4079)
+- Remove `googleapis-common-protos` from deps in non-`core` packages. (#4098)
+- Add google.api.core.operations_v1 (#4081)
+- Fix test assertion in test_wrap_method_with_overriding_retry_deadline (#4131)
+- Add google.api.core.helpers.general_helpers.wraps (#4166)
+- Update Docs with Python Setup Guide (#4187)
+- Move modules in google.api.core.helpers up one level, delete google.api.core.helpers. (#4196)
+- Clarify that PollingFuture timeout is in seconds. (#4201)
+- Add api_core package (#4210)
+- Replace usage of google.api.core with google.api_core (#4221)
+- Add google.api_core.gapic_v2.client_info (#4225)
+- Fix how api_core.operation populates exception errors (#4231)
+- Fix bare except (#4250)
+- Fix parsing of API errors with Unicode err message (#4251)
+- Port gax proto helper methods (#4249)
+- Remove gapic_v1.method.wrap_with_paging (#4257)
+- Add final set of protobuf helpers to api_core (#4259)
+
+PyPI: https://pypi.org/project/google-api-core/0.1.0/
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..039f436
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,95 @@
+<!-- # Generated by synthtool. DO NOT EDIT! -->
+# Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
new file mode 100644
index 0000000..6b375f0
--- /dev/null
+++ b/CONTRIBUTING.rst
@@ -0,0 +1,257 @@
+############
+Contributing
+############
+
+#. **Please sign one of the contributor license agreements below.**
+#. Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+ documentation.
+
+- The feature must work fully on the following CPython versions:
+ 3.6, 3.7, 3.8, 3.9, and 3.10 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+ "unnecessary" is of course subjective, but new dependencies should
+ be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+ ``python-api-core`` `repo`_ on GitHub.
+
+- Fork the ``python-api-core`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``python-api-core`` from your GitHub account to your local
+ computer, substituting your account username and specifying the destination
+ as ``hack-on-python-api-core``. E.g.::
+
+ $ cd ${HOME}
+ $ git clone git@github.com:USERNAME/python-api-core.git hack-on-python-api-core
+ $ cd hack-on-python-api-core
+ # Configure remotes such that you can pull changes from the googleapis/python-api-core
+ # repository into your local repository.
+ $ git remote add upstream git@github.com:googleapis/python-api-core.git
+ # fetch and merge changes from upstream into main
+ $ git fetch upstream
+ $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/python-api-core
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+ $ nox -s unit-3.10 -- -k <name of test>
+
+
+ .. note::
+
+     The unit tests are described in the ``noxfile.py`` files
+ in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+ $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+ If you have ``nox`` installed, you can test that you have not introduced
+ any non-compliant code via::
+
+ $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+ variables::
+
+ export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+ export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+ By doing this, you are specifying the location of the most up-to-date
+  version of ``python-api-core``. The suggested remote name ``upstream``
+  should point to the official ``googleapis`` checkout and the
+  branch should be the main branch on that remote (``main``).
+
+- This repository contains configuration for the
+ `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+ our linters during a commit. If you have it installed on your ``$PATH``,
+ you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+ $ pre-commit install
+ pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+ "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
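+
+  A hypothetical illustration of the pattern (the test and helper names
+  are placeholders, not actual tests from this repository):
+
+  .. code-block:: python
+
+     class TestFromRFC3339(object):
+         @staticmethod
+         def _call_fut(value):
+             # Delegate to the function-under-test so each test
+             # references it in exactly one place.
+             from google.api_core.datetime_helpers import from_rfc3339
+             return from_rfc3339(value)
+
+         def test_it(self):
+             result = self._call_fut("2017-12-05T00:00:00.000000Z")
+             assert result.year == 2017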
+
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+ You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+ $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the ``samples/`` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own ``noxfile.py`` script
+which automates testing. If you decide to create a new folder, you can
+base it on the ``samples/snippets`` folder (providing ``noxfile.py`` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+ # Run all tests in a folder
+ $ cd samples/snippets
+ $ nox -s py-3.8
+
+ # Run a single sample test
+ $ cd samples/snippets
+ $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/python-api-core/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-api-core
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.6`_
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+
+.. _Python 3.6: https://docs.python.org/3.6/
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/python-api-core/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.6.
+Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+ works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+ intellectual property**, then you'll need to sign an
+ `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+ then you'll need to sign a
+ `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..e783f4c
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..cfe802c
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,18 @@
+name: "python-api-core"
+description:
+ "This library is not meant to stand-alone. Instead it defines common "
+ "helpers used by all Google API clients."
+
+third_party {
+ url {
+ type: HOMEPAGE
+ value: "https://pypi.org/project/google-api-core/"
+ }
+ url {
+ type: GIT
+ value: "https://github.com/googleapis/python-api-core"
+ }
+ version: "v2.3.0"
+ last_upgrade_date { year: 2022 month: 1 day: 4 }
+ license_type: NOTICE
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/NOTICE b/NOTICE
new file mode 120000
index 0000000..7a694c9
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1 @@
+LICENSE \ No newline at end of file
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..d94f3e8
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,28 @@
+Core Library for Google Client Libraries
+========================================
+
+|pypi| |versions|
+
+This library is not meant to stand alone. Instead, it defines
+common helpers used by all Google API clients. For more information, see the
+`documentation`_.
+
+.. |pypi| image:: https://img.shields.io/pypi/v/google-api-core.svg
+   :target: https://pypi.org/project/google-api-core/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-api-core.svg
+   :target: https://pypi.org/project/google-api-core/
+.. _documentation: https://googleapis.dev/python/google-api-core/latest
+
+
+Supported Python Versions
+-------------------------
+Python >= 3.6
+
+
+Unsupported Python Versions
+---------------------------
+
+Python == 2.7, Python == 3.5.
+
+The last version of this library compatible with Python 2.7 and 3.5 is
+``google-api-core==1.31.1``.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..8b58ae9
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and we do coordination and disclosure here on GitHub, using Security Advisories to privately discuss and fix the issue.
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
new file mode 100644
index 0000000..b0a2954
--- /dev/null
+++ b/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+ border-color: red;
+ border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+ padding-top: 10px;
+ padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+ padding-bottom: 50px
+}
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
new file mode 100644
index 0000000..6316a53
--- /dev/null
+++ b/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+ <div class="document">
+ {{ sidebar() }}
+ {%- block document %}
+ <div class="documentwrapper">
+ {%- if render_sidebar %}
+ <div class="bodywrapper">
+ {%- endif %}
+
+ {%- block relbar_top %}
+ {%- if theme_show_relbar_top|tobool %}
+ <div class="related top">
+ &nbsp;
+ {{- rellink_markup () }}
+ </div>
+ {%- endif %}
+ {% endblock %}
+
+ <div class="body" role="main">
+ <div class="admonition" id="python2-eol">
+        As of January 1, 2020, this library no longer supports Python 2 on the latest released version.
+        Library versions released prior to that date will continue to be available. For more information, please
+        visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+ </div>
+ {% block body %} {% endblock %}
+ </div>
+
+ {%- block relbar_bottom %}
+ {%- if theme_show_relbar_bottom|tobool %}
+ <div class="related bottom">
+ &nbsp;
+ {{- rellink_markup () }}
+ </div>
+ {%- endif %}
+ {% endblock %}
+
+ {%- if render_sidebar %}
+ </div>
+ {%- endif %}
+ </div>
+ {%- endblock %}
+ <div class="clearer"></div>
+ </div>
+{%- else %}
+{{ super() }}
+{%- endif %}
+{%- endblock %}
diff --git a/docs/auth.rst b/docs/auth.rst
new file mode 100644
index 0000000..a9b296d
--- /dev/null
+++ b/docs/auth.rst
@@ -0,0 +1,213 @@
+Authentication
+**************
+
+.. _Overview:
+
+Overview
+========
+
+For a language agnostic overview of authentication on Google Cloud, see `Authentication Overview`_.
+
+.. _Authentication Overview: https://cloud.google.com/docs/authentication
+
+* **If you're running in a Google Virtual Machine Environment (Compute Engine, App Engine, Cloud Run, Cloud Functions)**,
+ authentication should "just work".
+
+* **If you're developing locally**,
+ the easiest way to authenticate is using the `Google Cloud SDK`_:
+
+ .. code-block:: bash
+
+ $ gcloud auth application-default login
+
+ Note that this command generates credentials for client libraries. To authenticate the CLI itself, use:
+
+ .. code-block:: bash
+
+ $ gcloud auth login
+
+ Previously, ``gcloud auth login`` was used for both use cases. If
+ your ``gcloud`` installation does not support the new command,
+ please update it:
+
+ .. code-block:: bash
+
+ $ gcloud components update
+
+.. _Google Cloud SDK: http://cloud.google.com/sdk
+
+
+* **If you're running your application elsewhere**,
+ you should download a `service account`_ JSON keyfile
+ and point to it using an environment variable:
+
+ .. code-block:: bash
+
+ $ export GOOGLE_APPLICATION_CREDENTIALS="/path/to/keyfile.json"
+
+.. _service account: https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating
+
+Client-Provided Authentication
+==============================
+
+Every package uses a :class:`Client <google.cloud.client.Client>`
+as a base for interacting with an API.
+For example:
+
+.. code-block:: python
+
+ from google.cloud import datastore
+ client = datastore.Client()
+
+Passing no arguments at all will "just work" if you've followed the
+instructions in the :ref:`Overview`.
+The credentials are inferred from your local environment by using
+Google `Application Default Credentials`_.
+
+.. _Application Default Credentials: https://developers.google.com/identity/protocols/application-default-credentials
+
+.. _Precedence:
+
+Credential Discovery Precedence
+-------------------------------
+
+When loading the `Application Default Credentials`_,
+the library will check for credentials in your environment by following the
+precedence outlined by :func:`google.auth.default`.
+
+Explicit Credentials
+====================
+
+The Application Default Credentials discussed above can be useful
+if your code needs to run in many different environments or
+if you just don't want authentication to be a focus in your code.
+
+However, you may want to be explicit because
+
+* your code will only run in one place
+* you may have code which needs to be run as a specific service account
+ every time (rather than with the locally inferred credentials)
+* you may want to use two separate accounts to simultaneously access data
+ from different projects
+
+In these situations, you can create an explicit
+:class:`~google.auth.credentials.Credentials` object suited to your environment.
+After creation, you can pass it directly to a :class:`Client <google.cloud.client.Client>`:
+
+.. code:: python
+
+ client = Client(credentials=credentials)
+
+.. tip::
+ To create a credentials object, follow the `google-auth-guide`_.
+
+.. _google-auth-guide: https://googleapis.dev/python/google-auth/latest/user-guide.html#service-account-private-key-files
+
+Google Compute Engine Environment
+---------------------------------
+
+These credentials are used in Google Virtual Machine Environments.
+This includes most App Engine runtimes, Compute Engine, Cloud
+Functions, and Cloud Run.
+
+To create
+:class:`credentials <google.auth.compute_engine.Credentials>`:
+
+.. code:: python
+
+ from google.auth import compute_engine
+ credentials = compute_engine.Credentials()
+
+Service Accounts
+----------------
+
+A `service account`_ is stored in a JSON keyfile.
+
+.. code:: python
+
+ from google.oauth2 import service_account
+
+ credentials = service_account.Credentials.from_service_account_file(
+ '/path/to/key.json')
+
+Credentials can also be loaded from a JSON string or dictionary:
+
+.. code:: python
+
+ import json
+
+ from google.oauth2 import service_account
+
+ json_account_info = json.loads(...) # convert JSON to dictionary
+ credentials = service_account.Credentials.from_service_account_info(
+ json_account_info)
+
+.. tip::
+
+ Previously the Google Cloud Console would issue a PKCS12/P12 key for your
+ service account. This library does not support that key format. You can
+ generate a new JSON key for the same service account from the console.
+
+User Accounts (3-legged OAuth 2.0) with a refresh token
+-------------------------------------------------------
+
+Most use cases call for authenticating machines or
+workers rather than actual user accounts. However, it's also
+possible to call Google Cloud APIs with a user account via
+`OAuth 2.0`_.
+
+.. _OAuth 2.0: https://developers.google.com/identity/protocols/OAuth2
+
+.. tip::
+
+ A production application should **use a service account**,
+ but you may wish to use your own personal user account when first
+ getting started with the ``google-cloud-*`` library.
+
+The simplest way to use credentials from a user account is via
+Application Default Credentials using ``gcloud auth application-default login``
+(as mentioned above) and :func:`google.auth.default`:
+
+.. code:: python
+
+ import google.auth
+
+ credentials, project = google.auth.default()
+
+This will still follow the :ref:`precedence <Precedence>`
+described above,
+so be sure none of the other possible environments conflict
+with your user-provided credentials.
+
+Troubleshooting
+===============
+
+Setting up a Service Account
+----------------------------
+
+If your application is not running on a Google Virtual Machine Environment,
+you need a Service Account. See `Creating a Service Account`_.
+
+.. _Creating a Service Account: https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating
+
+Using Google Compute Engine
+---------------------------
+
+If your code is running on Google Compute Engine,
+using the inferred Google `Application Default Credentials`_
+will be sufficient for retrieving credentials.
+
+However, by default your credentials may not grant you
+access to the services you intend to use.
+Be sure when you `set up the GCE instance`_,
+you add the correct scopes for the APIs you want to access:
+
+* **All APIs**
+
+ * ``https://www.googleapis.com/auth/cloud-platform``
+ * ``https://www.googleapis.com/auth/cloud-platform.read-only``
+
+For the scopes required by specific APIs, see `OAuth 2.0 Scopes for Google APIs`_.
+
+.. _set up the GCE instance: https://cloud.google.com/compute/docs/authentication#using
+.. _OAuth 2.0 Scopes for Google APIs: https://developers.google.com/identity/protocols/oauth2/scopes
diff --git a/docs/changelog.md b/docs/changelog.md
new file mode 120000
index 0000000..04c99a5
--- /dev/null
+++ b/docs/changelog.md
@@ -0,0 +1 @@
+../CHANGELOG.md \ No newline at end of file
diff --git a/docs/client_info.rst b/docs/client_info.rst
new file mode 100644
index 0000000..e976b18
--- /dev/null
+++ b/docs/client_info.rst
@@ -0,0 +1,11 @@
+Client Information Helpers
+==========================
+
+.. automodule:: google.api_core.client_info
+ :members:
+ :show-inheritance:
+
+.. automodule:: google.api_core.gapic_v1.client_info
+ :members:
+ :show-inheritance:
+
diff --git a/docs/client_options.rst b/docs/client_options.rst
new file mode 100644
index 0000000..da7c9a3
--- /dev/null
+++ b/docs/client_options.rst
@@ -0,0 +1,6 @@
+Client Options
+==========================
+
+.. automodule:: google.api_core.client_options
+ :members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..09f0c2b
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,375 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# google-api-core documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
+__version__ = ""
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.5.5"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.coverage",
+ "sphinx.ext.doctest",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.todo",
+ "sphinx.ext.viewcode",
+ "recommonmark",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_options = {"members": True}
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = "google-api-core"
+copyright = "2019, Google"
+author = "Google APIs"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = [
+ "_build",
+ "**/.nox/**/*",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+ "description": "Google Cloud Client Libraries for google-api-core",
+ "github_user": "googleapis",
+ "github_repo": "python-api-core",
+ "github_banner": True,
+ "font_family": "'Roboto', Georgia, sans",
+ "head_font_family": "'Roboto', Georgia, serif",
+ "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-api-core-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+ # Temporarily suppress this to avoid "more than one target found for
+ # cross-reference" warning, which are intractable for us to avoid while in
+ # a mono-repo.
+ # See https://github.com/sphinx-doc/sphinx/blob
+ # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+ "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #'papersize': 'letterpaper',
+ # The font size ('10pt', '11pt' or '12pt').
+ #'pointsize': '10pt',
+ # Additional stuff for the LaTeX preamble.
+ #'preamble': '',
+ # Latex figure (float) alignment
+ #'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (
+ root_doc,
+ "google-api-core.tex",
+ "google-api-core Documentation",
+ author,
+ "manual",
+ )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (root_doc, "google-api-core", "google-api-core Documentation", [author], 1,)
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ root_doc,
+ "google-api-core",
+ "google-api-core Documentation",
+ author,
+ "google-api-core",
+ "google-api-core Library",
+ "APIs",
+ )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+ "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/docs/exceptions.rst b/docs/exceptions.rst
new file mode 100644
index 0000000..d671f4e
--- /dev/null
+++ b/docs/exceptions.rst
@@ -0,0 +1,6 @@
+Exceptions
+==========
+
+.. automodule:: google.api_core.exceptions
+ :members:
+ :show-inheritance:
diff --git a/docs/futures.rst b/docs/futures.rst
new file mode 100644
index 0000000..d0dadac
--- /dev/null
+++ b/docs/futures.rst
@@ -0,0 +1,14 @@
+Futures
+=======================
+
+.. automodule:: google.api_core.future
+ :members:
+ :show-inheritance:
+
+.. automodule:: google.api_core.future.polling
+ :members:
+ :show-inheritance:
+
+.. automodule:: google.api_core.future.async_future
+ :members:
+ :show-inheritance:
diff --git a/docs/helpers.rst b/docs/helpers.rst
new file mode 100644
index 0000000..6f72df9
--- /dev/null
+++ b/docs/helpers.rst
@@ -0,0 +1,26 @@
+Helpers
+=======
+
+
+General Helpers
+---------------
+
+.. automodule:: google.api_core.general_helpers
+ :members:
+ :show-inheritance:
+
+
+Datetime Helpers
+----------------
+
+.. automodule:: google.api_core.datetime_helpers
+ :members:
+ :show-inheritance:
+
+
+gRPC Helpers
+------------
+
+.. automodule:: google.api_core.grpc_helpers
+ :members:
+ :show-inheritance:
diff --git a/docs/iam.rst b/docs/iam.rst
new file mode 100644
index 0000000..bb80ae3
--- /dev/null
+++ b/docs/iam.rst
@@ -0,0 +1,7 @@
+Identity and Access Management
+==============================
+
+.. automodule:: google.api_core.iam
+ :members:
+ :show-inheritance:
+ :member-order: bysource
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..858e889
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,32 @@
+The ``google-cloud-core`` package contains helpers common to all
+``google-cloud-*`` packages. In an attempt to reach a stable API,
+much of the functionality has been split out into this package,
+``google-api-core``.
+
+.. include:: multiprocessing.rst
+
+Core
+====
+
+.. toctree::
+ auth
+ client_info
+ client_options
+ exceptions
+ futures
+ helpers
+ iam
+ operation
+ operations_client
+ page_iterator
+ path_template
+ retry
+ timeout
+
+Changelog
+~~~~~~~~~
+
+.. toctree::
+ :maxdepth: 2
+
+ changelog
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
new file mode 100644
index 0000000..536d17b
--- /dev/null
+++ b/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+ share instances across threads. In multiprocessing scenarios, the best
+ practice is to create client instances *after* the invocation of
+ :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+ :class:`multiprocessing.Process`.
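+
+   A minimal sketch of the recommended pattern (``datastore`` stands in
+   for any gRPC-based client; substitute your own):
+
+   .. code-block:: python
+
+      import multiprocessing
+
+      def worker(index):
+          # Construct the client *inside* the worker, i.e. after the
+          # fork, so each process owns its own gRPC channel.
+          from google.cloud import datastore
+          client = datastore.Client()
+          return client.project
+
+      if __name__ == "__main__":
+          with multiprocessing.Pool(processes=2) as pool:
+              print(pool.map(worker, range(2)))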
diff --git a/docs/operation.rst b/docs/operation.rst
new file mode 100644
index 0000000..492cf67
--- /dev/null
+++ b/docs/operation.rst
@@ -0,0 +1,13 @@
+Long-Running Operations
+=======================
+
+.. automodule:: google.api_core.operation
+ :members:
+ :show-inheritance:
+
+Long-Running Operations in AsyncIO
+-------------------------------------
+
+.. automodule:: google.api_core.operation_async
+ :members:
+ :show-inheritance:
diff --git a/docs/operations_client.rst b/docs/operations_client.rst
new file mode 100644
index 0000000..be466d3
--- /dev/null
+++ b/docs/operations_client.rst
@@ -0,0 +1,6 @@
+Long-Running Operations Client
+==============================
+
+.. automodule:: google.api_core.operations_v1
+ :members:
+ :show-inheritance:
diff --git a/docs/page_iterator.rst b/docs/page_iterator.rst
new file mode 100644
index 0000000..3652e6d
--- /dev/null
+++ b/docs/page_iterator.rst
@@ -0,0 +1,13 @@
+Page Iterators
+==============
+
+.. automodule:: google.api_core.page_iterator
+ :members:
+ :show-inheritance:
+
+Page Iterators in AsyncIO
+-------------------------
+
+.. automodule:: google.api_core.page_iterator_async
+ :members:
+ :show-inheritance:
diff --git a/docs/path_template.rst b/docs/path_template.rst
new file mode 100644
index 0000000..220779e
--- /dev/null
+++ b/docs/path_template.rst
@@ -0,0 +1,6 @@
+Path Templates
+==============
+
+.. automodule:: google.api_core.path_template
+ :members:
+ :show-inheritance:
diff --git a/docs/retry.rst b/docs/retry.rst
new file mode 100644
index 0000000..97a7f2c
--- /dev/null
+++ b/docs/retry.rst
@@ -0,0 +1,13 @@
+Retry
+=====
+
+.. automodule:: google.api_core.retry
+ :members:
+ :show-inheritance:
+
+Retry in AsyncIO
+----------------
+
+.. automodule:: google.api_core.retry_async
+ :members:
+ :show-inheritance:
diff --git a/docs/timeout.rst b/docs/timeout.rst
new file mode 100644
index 0000000..943d425
--- /dev/null
+++ b/docs/timeout.rst
@@ -0,0 +1,6 @@
+Timeout
+=======
+
+.. automodule:: google.api_core.timeout
+ :members:
+ :show-inheritance:
diff --git a/google/__init__.py b/google/__init__.py
new file mode 100644
index 0000000..9f1d549
--- /dev/null
+++ b/google/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google namespace package."""
+
+try:
+ import pkg_resources
+
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+
+ # See: https://github.com/python/mypy/issues/1422
+ __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore
diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py
new file mode 100644
index 0000000..605dd8b
--- /dev/null
+++ b/google/api_core/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google API Core.
+
+This package contains common code and utilities used by Google client libraries.
+"""
+
+from google.api_core import version as api_core_version
+
+__version__ = api_core_version.__version__
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
new file mode 100644
index 0000000..4b4963f
--- /dev/null
+++ b/google/api_core/bidi.py
@@ -0,0 +1,735 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bi-directional streaming RPC helpers."""
+
+import collections
+import datetime
+import logging
+import queue as queue_module
+import threading
+import time
+
+from google.api_core import exceptions
+
+_LOGGER = logging.getLogger(__name__)
+_BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream"
+
+
+class _RequestQueueGenerator(object):
+ """A helper for sending requests to a gRPC stream from a Queue.
+
+ This generator takes requests off a given queue and yields them to gRPC.
+
+ This helper is useful when you have an indeterminate, indefinite, or
+ otherwise open-ended set of requests to send through a request-streaming
+ (or bidirectional) RPC.
+
+ The reason this is necessary is because gRPC takes an iterator as the
+ request for request-streaming RPCs. gRPC consumes this iterator in another
+ thread to allow it to block while generating requests for the stream.
+ However, if the generator blocks indefinitely gRPC will not be able to
+ clean up the thread as it'll be blocked on `next(iterator)` and not be able
+ to check the channel status to stop iterating. This helper mitigates that
+ by waiting on the queue with a timeout and checking the RPC state before
+ yielding.
+
+ Finally, it allows for retrying without swapping queues because if it does
+ pull an item off the queue when the RPC is inactive, it'll immediately put
+ it back and then exit. This is necessary because yielding the item in this
+ case will cause gRPC to discard it. In practice, this means that the order
+ of messages is not guaranteed. If such a thing is necessary it would be
+ easy to use a priority queue.
+
+ Example::
+
+ requests = request_queue_generator(q)
+ call = stub.StreamingRequest(iter(requests))
+ requests.call = call
+
+ for response in call:
+ print(response)
+ q.put(...)
+
+ Note that it is possible to accomplish this behavior without "spinning"
+ (using a queue timeout). One possible way would be to use more threads to
+ multiplex the grpc end event with the queue, another possible way is to
+ use selectors and a custom event/queue object. Both of these approaches
+    involve significant engineering effort for small benefit - the
+    CPU consumed by spinning is minuscule.
+
+ Args:
+ queue (queue_module.Queue): The request queue.
+ period (float): The number of seconds to wait for items from the queue
+ before checking if the RPC is cancelled. In practice, this
+ determines the maximum amount of time the request consumption
+ thread will live after the RPC is cancelled.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+            yield. This is done independently of the request queue to allow for
+ easily restarting streams that require some initial configuration
+ request.
+ """
+
+ def __init__(self, queue, period=1, initial_request=None):
+ self._queue = queue
+ self._period = period
+ self._initial_request = initial_request
+ self.call = None
+
+ def _is_active(self):
+ # Note: there is a possibility that this starts *before* the call
+ # property is set. So we have to check if self.call is set before
+ # seeing if it's active.
+ if self.call is not None and not self.call.is_active():
+ return False
+ else:
+ return True
+
+ def __iter__(self):
+ if self._initial_request is not None:
+ if callable(self._initial_request):
+ yield self._initial_request()
+ else:
+ yield self._initial_request
+
+ while True:
+ try:
+ item = self._queue.get(timeout=self._period)
+ except queue_module.Empty:
+ if not self._is_active():
+ _LOGGER.debug(
+                        "Empty queue and inactive call, exiting request generator."
+ )
+ return
+ else:
+ # call is still active, keep waiting for queue items.
+ continue
+
+ # The consumer explicitly sent "None", indicating that the request
+ # should end.
+ if item is None:
+ _LOGGER.debug("Cleanly exiting request generator.")
+ return
+
+ if not self._is_active():
+ # We have an item, but the call is closed. We should put the
+ # item back on the queue so that the next call can consume it.
+ self._queue.put(item)
+ _LOGGER.debug(
+ "Inactive call, replacing item on queue and exiting "
+ "request generator."
+ )
+ return
+
+ yield item
+
+
+class _Throttle(object):
+ """A context manager limiting the total entries in a sliding time window.
+
+ If more than ``access_limit`` attempts are made to enter the context manager
+ instance in the last ``time window`` interval, the exceeding requests block
+ until enough time elapses.
+
+ The context manager instances are thread-safe and can be shared between
+ multiple threads. If multiple requests are blocked and waiting to enter,
+ the exact order in which they are allowed to proceed is not determined.
+
+ Example::
+
+ max_three_per_second = _Throttle(
+ access_limit=3, time_window=datetime.timedelta(seconds=1)
+ )
+
+ for i in range(5):
+ with max_three_per_second as time_waited:
+ print("{}: Waited {} seconds to enter".format(i, time_waited))
+
+ Args:
+ access_limit (int): the maximum number of entries allowed in the time window
+ time_window (datetime.timedelta): the width of the sliding time window
+ """
+
+ def __init__(self, access_limit, time_window):
+ if access_limit < 1:
+ raise ValueError("access_limit argument must be positive")
+
+ if time_window <= datetime.timedelta(0):
+ raise ValueError("time_window argument must be a positive timedelta")
+
+ self._time_window = time_window
+ self._access_limit = access_limit
+ self._past_entries = collections.deque(
+ maxlen=access_limit
+ ) # least recent first
+ self._entry_lock = threading.Lock()
+
+ def __enter__(self):
+ with self._entry_lock:
+ cutoff_time = datetime.datetime.now() - self._time_window
+
+ # drop the entries that are too old, as they are no longer relevant
+ while self._past_entries and self._past_entries[0] < cutoff_time:
+ self._past_entries.popleft()
+
+ if len(self._past_entries) < self._access_limit:
+ self._past_entries.append(datetime.datetime.now())
+ return 0.0 # no waiting was needed
+
+ to_wait = (self._past_entries[0] - cutoff_time).total_seconds()
+ time.sleep(to_wait)
+
+ self._past_entries.append(datetime.datetime.now())
+ return to_wait
+
+ def __exit__(self, *_):
+ pass
+
+ def __repr__(self):
+ return "{}(access_limit={}, time_window={})".format(
+ self.__class__.__name__, self._access_limit, repr(self._time_window)
+ )
+
+
+class BidiRpc(object):
+ """A helper for consuming a bi-directional streaming RPC.
+
+ This maps gRPC's built-in interface which uses a request iterator and a
+ response iterator into a socket-like :func:`send` and :func:`recv`. This
+ is a more useful pattern for long-running or asymmetric streams (streams
+ where there is not a direct correlation between the requests and
+ responses).
+
+ Example::
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+ rpc = BidiRpc(
+ stub.StreamingRpc,
+ initial_request=initial_request,
+ metadata=[('name', 'value')]
+ )
+
+ rpc.open()
+
+ while rpc.is_active():
+ print(rpc.recv())
+ rpc.send(example_pb2.StreamingRpcRequest(
+ data='example'))
+
+ This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`.
+
+ Args:
+ start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
+ start the RPC.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is useful if an initial request is needed to start the
+ stream.
+ metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
+ the request.
+ """
+
+ def __init__(self, start_rpc, initial_request=None, metadata=None):
+ self._start_rpc = start_rpc
+ self._initial_request = initial_request
+ self._rpc_metadata = metadata
+ self._request_queue = queue_module.Queue()
+ self._request_generator = None
+ self._is_active = False
+ self._callbacks = []
+ self.call = None
+
+ def add_done_callback(self, callback):
+ """Adds a callback that will be called when the RPC terminates.
+
+ This occurs when the RPC errors or is successfully terminated.
+
+ Args:
+ callback (Callable[[grpc.Future], None]): The callback to execute.
+ It will be provided with the same gRPC future as the underlying
+ stream which will also be a :class:`grpc.Call`.
+ """
+ self._callbacks.append(callback)
+
+ def _on_call_done(self, future):
+ for callback in self._callbacks:
+ callback(future)
+
+ def open(self):
+ """Opens the stream."""
+ if self.is_active:
+ raise ValueError("Can not open an already open stream.")
+
+ request_generator = _RequestQueueGenerator(
+ self._request_queue, initial_request=self._initial_request
+ )
+ call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+
+ request_generator.call = call
+
+ # TODO: api_core should expose the future interface for wrapped
+ # callables as well.
+ if hasattr(call, "_wrapped"): # pragma: NO COVER
+ call._wrapped.add_done_callback(self._on_call_done)
+ else:
+ call.add_done_callback(self._on_call_done)
+
+ self._request_generator = request_generator
+ self.call = call
+
+ def close(self):
+ """Closes the stream."""
+ if self.call is None:
+ return
+
+ self._request_queue.put(None)
+ self.call.cancel()
+ self._request_generator = None
+ # Don't set self.call to None. Keep it around so that send/recv can
+ # raise the error.
+
+ def send(self, request):
+ """Queue a message to be sent on the stream.
+
+ Send is non-blocking.
+
+ If the underlying RPC has been closed, this will raise.
+
+ Args:
+ request (protobuf.Message): The request to send.
+ """
+ if self.call is None:
+ raise ValueError("Can not send() on an RPC that has never been open()ed.")
+
+ # Don't use self.is_active(), as ResumableBidiRpc will overload it
+ # to mean something semantically different.
+ if self.call.is_active():
+ self._request_queue.put(request)
+ else:
+ # calling next should cause the call to raise.
+ next(self.call)
+
+ def recv(self):
+ """Wait for a message to be returned from the stream.
+
+ Recv is blocking.
+
+ If the underlying RPC has been closed, this will raise.
+
+ Returns:
+ protobuf.Message: The received message.
+ """
+ if self.call is None:
+ raise ValueError("Can not recv() on an RPC that has never been open()ed.")
+
+ return next(self.call)
+
+ @property
+ def is_active(self):
+ """bool: True if this stream is currently open and active."""
+ return self.call is not None and self.call.is_active()
+
+ @property
+ def pending_requests(self):
+ """int: Returns an estimate of the number of queued requests."""
+ return self._request_queue.qsize()
+
+
+def _never_terminate(future_or_error):
+ """By default, no errors cause BiDi termination."""
+ return False
+
+
+class ResumableBidiRpc(BidiRpc):
+ """A :class:`BidiRpc` that can automatically resume the stream on errors.
+
+ It uses the ``should_recover`` arg to determine if it should re-establish
+ the stream on error.
+
+ Example::
+
+ def should_recover(exc):
+ return (
+ isinstance(exc, grpc.RpcError) and
+                exc.code() == grpc.StatusCode.UNAVAILABLE)
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+
+ metadata = [('header_name', 'value')]
+
+ rpc = ResumableBidiRpc(
+ stub.StreamingRpc,
+ should_recover=should_recover,
+ initial_request=initial_request,
+ metadata=metadata
+ )
+
+ rpc.open()
+
+ while rpc.is_active():
+ print(rpc.recv())
+ rpc.send(example_pb2.StreamingRpcRequest(
+ data='example'))
+
+ Args:
+ start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to
+ start the RPC.
+ initial_request (Union[protobuf.Message,
+ Callable[None, protobuf.Message]]): The initial request to
+ yield. This is useful if an initial request is needed to start the
+ stream.
+ should_recover (Callable[[Exception], bool]): A function that returns
+ True if the stream should be recovered. This will be called
+ whenever an error is encountered on the stream.
+ should_terminate (Callable[[Exception], bool]): A function that returns
+ True if the stream should be terminated. This will be called
+ whenever an error is encountered on the stream.
+        metadata (Sequence[Tuple(str, str)]): RPC metadata to include in
+ the request.
+ throttle_reopen (bool): If ``True``, throttling will be applied to
+ stream reopen calls. Defaults to ``False``.
+ """
+
+ def __init__(
+ self,
+ start_rpc,
+ should_recover,
+ should_terminate=_never_terminate,
+ initial_request=None,
+ metadata=None,
+ throttle_reopen=False,
+ ):
+ super(ResumableBidiRpc, self).__init__(start_rpc, initial_request, metadata)
+ self._should_recover = should_recover
+ self._should_terminate = should_terminate
+ self._operational_lock = threading.RLock()
+ self._finalized = False
+ self._finalize_lock = threading.Lock()
+
+ if throttle_reopen:
+ self._reopen_throttle = _Throttle(
+ access_limit=5, time_window=datetime.timedelta(seconds=10)
+ )
+ else:
+ self._reopen_throttle = None
+
+ def _finalize(self, result):
+ with self._finalize_lock:
+ if self._finalized:
+ return
+
+ for callback in self._callbacks:
+ callback(result)
+
+ self._finalized = True
+
+ def _on_call_done(self, future):
+ # Unlike the base class, we only execute the callbacks on a terminal
+ # error, not for errors that we can recover from. Note that grpc's
+ # "future" here is also a grpc.RpcError.
+ with self._operational_lock:
+ if self._should_terminate(future):
+ self._finalize(future)
+ elif not self._should_recover(future):
+ self._finalize(future)
+ else:
+ _LOGGER.debug("Re-opening stream from gRPC callback.")
+ self._reopen()
+
+ def _reopen(self):
+ with self._operational_lock:
+ # Another thread already managed to re-open this stream.
+ if self.call is not None and self.call.is_active():
+ _LOGGER.debug("Stream was already re-established.")
+ return
+
+ self.call = None
+            # Request generator should exit cleanly since the RPC it's bound to
+ # has exited.
+ self._request_generator = None
+
+ # Note: we do not currently do any sort of backoff here. The
+ # assumption is that re-establishing the stream under normal
+ # circumstances will happen in intervals greater than 60s.
+ # However, it is possible in a degenerative case that the server
+ # closes the stream rapidly which would lead to thrashing here,
+ # but hopefully in those cases the server would return a non-
+ # retryable error.
+
+ try:
+ if self._reopen_throttle:
+ with self._reopen_throttle:
+ self.open()
+ else:
+ self.open()
+ # If re-opening or re-calling the method fails for any reason,
+ # consider it a terminal error and finalize the stream.
+ except Exception as exc:
+ _LOGGER.debug("Failed to re-open stream due to %s", exc)
+ self._finalize(exc)
+ raise
+
+ _LOGGER.info("Re-established stream")
+
+ def _recoverable(self, method, *args, **kwargs):
+ """Wraps a method to recover the stream and retry on error.
+
+ If a retryable error occurs while making the call, then the stream will
+ be re-opened and the method will be retried. This happens indefinitely
+ so long as the error is a retryable one. If an error occurs while
+ re-opening the stream, then this method will raise immediately and
+ trigger finalization of this object.
+
+ Args:
+ method (Callable[..., Any]): The method to call.
+ args: The args to pass to the method.
+ kwargs: The kwargs to pass to the method.
+ """
+ while True:
+ try:
+ return method(*args, **kwargs)
+
+ except Exception as exc:
+ with self._operational_lock:
+ _LOGGER.debug("Call to retryable %r caused %s.", method, exc)
+
+ if self._should_terminate(exc):
+ self.close()
+ _LOGGER.debug("Terminating %r due to %s.", method, exc)
+ self._finalize(exc)
+ break
+
+ if not self._should_recover(exc):
+ self.close()
+ _LOGGER.debug("Not retrying %r due to %s.", method, exc)
+ self._finalize(exc)
+ raise exc
+
+ _LOGGER.debug("Re-opening stream from retryable %r.", method)
+ self._reopen()
+
+ def _send(self, request):
+ # Grab a reference to the RPC call. Because another thread (notably
+ # the gRPC error thread) can modify self.call (by invoking reopen),
+ # we should ensure our reference can not change underneath us.
+ # If self.call is modified (such as replaced with a new RPC call) then
+ # this will use the "old" RPC, which should result in the same
+ # exception passed into gRPC's error handler being raised here, which
+ # will be handled by the usual error handling in retryable.
+ with self._operational_lock:
+ call = self.call
+
+ if call is None:
+ raise ValueError("Can not send() on an RPC that has never been open()ed.")
+
+ # Don't use self.is_active(), as ResumableBidiRpc will overload it
+ # to mean something semantically different.
+ if call.is_active():
+ self._request_queue.put(request)
+ else:
+ # calling next should cause the call to raise.
+ next(call)
+
+ def send(self, request):
+ return self._recoverable(self._send, request)
+
+ def _recv(self):
+ with self._operational_lock:
+ call = self.call
+
+ if call is None:
+ raise ValueError("Can not recv() on an RPC that has never been open()ed.")
+
+ return next(call)
+
+ def recv(self):
+ return self._recoverable(self._recv)
+
+ def close(self):
+ self._finalize(None)
+ super(ResumableBidiRpc, self).close()
+
+ @property
+ def is_active(self):
+ """bool: True if this stream is currently open and active."""
+ # Use the operational lock. It's entirely possible for something
+ # to check the active state *while* the RPC is being retried.
+ # Also, use finalized to track the actual terminal state here.
+ # This is because if the stream is re-established by the gRPC thread
+ # it's technically possible to check this between when gRPC marks the
+ # RPC as inactive and when gRPC executes our callback that re-opens
+ # the stream.
+ with self._operational_lock:
+ return self.call is not None and not self._finalized
+
+
+class BackgroundConsumer(object):
+ """A bi-directional stream consumer that runs in a separate thread.
+
+ This maps the consumption of a stream into a callback-based model. It also
+ provides :func:`pause` and :func:`resume` to allow for flow-control.
+
+ Example::
+
+ def should_recover(exc):
+ return (
+ isinstance(exc, grpc.RpcError) and
+                exc.code() == grpc.StatusCode.UNAVAILABLE)
+
+ initial_request = example_pb2.StreamingRpcRequest(
+ setting='example')
+
+        rpc = ResumableBidiRpc(
+ stub.StreamingRpc,
+ initial_request=initial_request,
+ should_recover=should_recover)
+
+ def on_response(response):
+ print(response)
+
+ consumer = BackgroundConsumer(rpc, on_response)
+ consumer.start()
+
+ Note that error handling *must* be done by using the provided
+ ``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit
+ whenever the RPC itself exits and will not provide any error details.
+
+ Args:
+ bidi_rpc (BidiRpc): The RPC to consume. Should not have been
+ ``open()``ed yet.
+ on_response (Callable[[protobuf.Message], None]): The callback to
+ be called for every response on the stream.
+ """
+
+ def __init__(self, bidi_rpc, on_response):
+ self._bidi_rpc = bidi_rpc
+ self._on_response = on_response
+ self._paused = False
+ self._wake = threading.Condition()
+ self._thread = None
+ self._operational_lock = threading.Lock()
+
+ def _on_call_done(self, future):
+ # Resume the thread if it's paused, this prevents blocking forever
+ # when the RPC has terminated.
+ self.resume()
+
+ def _thread_main(self, ready):
+ try:
+ ready.set()
+ self._bidi_rpc.add_done_callback(self._on_call_done)
+ self._bidi_rpc.open()
+
+ while self._bidi_rpc.is_active:
+ # Do not allow the paused status to change at all during this
+ # section. There is a condition where we could be resumed
+ # between checking if we are paused and calling wake.wait(),
+ # which means that we will miss the notification to wake up
+ # (oops!) and wait for a notification that will never come.
+ # Keeping the lock throughout avoids that.
+ # In the future, we could use `Condition.wait_for` if we drop
+ # Python 2.7.
+ # See: https://github.com/googleapis/python-api-core/issues/211
+ with self._wake:
+ while self._paused:
+ _LOGGER.debug("paused, waiting for waking.")
+ self._wake.wait()
+ _LOGGER.debug("woken.")
+
+ _LOGGER.debug("waiting for recv.")
+ response = self._bidi_rpc.recv()
+ _LOGGER.debug("recved response.")
+ self._on_response(response)
+
+ except exceptions.GoogleAPICallError as exc:
+ _LOGGER.debug(
+ "%s caught error %s and will exit. Generally this is due to "
+ "the RPC itself being cancelled and the error will be "
+ "surfaced to the calling code.",
+ _BIDIRECTIONAL_CONSUMER_NAME,
+ exc,
+ exc_info=True,
+ )
+
+ except Exception as exc:
+ _LOGGER.exception(
+ "%s caught unexpected exception %s and will exit.",
+ _BIDIRECTIONAL_CONSUMER_NAME,
+ exc,
+ )
+
+ _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
+
+ def start(self):
+        """Start the background thread and begin consuming the stream."""
+ with self._operational_lock:
+ ready = threading.Event()
+ thread = threading.Thread(
+ name=_BIDIRECTIONAL_CONSUMER_NAME,
+ target=self._thread_main,
+ args=(ready,),
+ )
+ thread.daemon = True
+ thread.start()
+ # Other parts of the code rely on `thread.is_alive` which
+ # isn't sufficient to know if a thread is active, just that it may
+ # soon be active. This can cause races. Further protect
+ # against races by using a ready event and wait on it to be set.
+ ready.wait()
+ self._thread = thread
+ _LOGGER.debug("Started helper thread %s", thread.name)
+
+ def stop(self):
+ """Stop consuming the stream and shutdown the background thread."""
+ with self._operational_lock:
+ self._bidi_rpc.close()
+
+ if self._thread is not None:
+ # Resume the thread to wake it up in case it is sleeping.
+ self.resume()
+ # The daemonized thread may itself block, so don't wait
+ # for it longer than a second.
+ self._thread.join(1.0)
+ if self._thread.is_alive(): # pragma: NO COVER
+ _LOGGER.warning("Background thread did not exit.")
+
+ self._thread = None
+
+ @property
+ def is_active(self):
+ """bool: True if the background thread is active."""
+ return self._thread is not None and self._thread.is_alive()
+
+ def pause(self):
+ """Pauses the response stream.
+
+ This does *not* pause the request stream.
+ """
+ with self._wake:
+ self._paused = True
+
+ def resume(self):
+ """Resumes the response stream."""
+ with self._wake:
+ self._paused = False
+ self._wake.notify_all()
+
+ @property
+ def is_paused(self):
+ """bool: True if the response stream is paused."""
+ return self._paused
diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py
new file mode 100644
index 0000000..e093ffd
--- /dev/null
+++ b/google/api_core/client_info.py
@@ -0,0 +1,107 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for providing client information.
+
+Client information is used to send information about the calling client,
+such as the library and Python version, to API services.
+"""
+
+import platform
+from typing import Union
+
+import pkg_resources
+
+from google.api_core import version as api_core_version
+
+_PY_VERSION = platform.python_version()
+_API_CORE_VERSION = api_core_version.__version__
+
+_GRPC_VERSION: Union[str, None]
+
+try:
+ _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version
+except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GRPC_VERSION = None
+
+
+class ClientInfo(object):
+ """Client information used to generate a user-agent for API calls.
+
+ This user-agent information is sent along with API calls to allow the
+ receiving service to do analytics on which versions of Python and Google
+ libraries are being used.
+
+ Args:
+ python_version (str): The Python interpreter version, for example,
+ ``'3.9.6'``.
+ grpc_version (Optional[str]): The gRPC library version.
+ api_core_version (str): The google-api-core library version.
+        gapic_version (Optional[str]): The version of the gapic-generated client
+ library, if the library was generated by gapic.
+ client_library_version (Optional[str]): The version of the client
+ library, generally used if the client library was not generated
+ by gapic or if additional functionality was built on top of
+ a gapic client library.
+ user_agent (Optional[str]): Prefix to the user agent header. This is
+ used to supply information such as application name or partner tool.
+ Recommended format: ``application-or-tool-ID/major.minor.version``.
+ rest_version (Optional[str]): The requests library version.
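+
+    Example (a short sketch; the version numbers shown are hypothetical, and
+    the ``grpc/...`` segment appears only when grpcio is installed)::
+
+        info = ClientInfo(user_agent="my-app/1.0", gapic_version="0.1.0")
+        info.to_user_agent()
+        # => 'my-app/1.0 gl-python/3.9.6 grpc/1.42.0 gax/2.3.0 gapic/0.1.0'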
+ """
+
+ def __init__(
+ self,
+ python_version=_PY_VERSION,
+ grpc_version=_GRPC_VERSION,
+ api_core_version=_API_CORE_VERSION,
+ gapic_version=None,
+ client_library_version=None,
+ user_agent=None,
+ rest_version=None,
+ ):
+ self.python_version = python_version
+ self.grpc_version = grpc_version
+ self.api_core_version = api_core_version
+ self.gapic_version = gapic_version
+ self.client_library_version = client_library_version
+ self.user_agent = user_agent
+ self.rest_version = rest_version
+
+ def to_user_agent(self):
+ """Returns the user-agent string for this client info."""
+
+ # Note: the order here is important as the internal metrics system
+ # expects these items to be in specific locations.
+ ua = ""
+
+ if self.user_agent is not None:
+ ua += "{user_agent} "
+
+ ua += "gl-python/{python_version} "
+
+ if self.grpc_version is not None:
+ ua += "grpc/{grpc_version} "
+
+ if self.rest_version is not None:
+ ua += "rest/{rest_version} "
+
+ ua += "gax/{api_core_version} "
+
+ if self.gapic_version is not None:
+ ua += "gapic/{gapic_version} "
+
+ if self.client_library_version is not None:
+ ua += "gccl/{client_library_version} "
+
+ return ua.format(**self.__dict__).strip()
diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py
new file mode 100644
index 0000000..be5523d
--- /dev/null
+++ b/google/api_core/client_options.py
@@ -0,0 +1,116 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client options class.
+
+Client options provide a consistent interface for user options to be defined
+across clients.
+
+You can pass a client options object to a client.
+
+.. code-block:: python
+
+ from google.api_core.client_options import ClientOptions
+ from google.cloud.vision_v1 import ImageAnnotatorClient
+
+ def get_client_cert():
+ # code to load client certificate and private key.
+ return client_cert_bytes, client_private_key_bytes
+
+ options = ClientOptions(api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert)
+
+ client = ImageAnnotatorClient(client_options=options)
+
+You can also pass a mapping object.
+
+.. code-block:: python
+
+ from google.cloud.vision_v1 import ImageAnnotatorClient
+
+ client = ImageAnnotatorClient(
+ client_options={
+ "api_endpoint": "foo.googleapis.com",
+ "client_cert_source" : get_client_cert
+ })
+
+
+"""
+
+
+class ClientOptions(object):
+ """Client Options used to set options on clients.
+
+ Args:
+ api_endpoint (Optional[str]): The desired API endpoint, e.g.,
+ compute.googleapis.com
+ client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback
+ which returns client certificate bytes and private key bytes both in
+ PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
+ are mutually exclusive.
+ client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]):
+ A callback which returns client certificate file path, encrypted
+            private key file path, and the passphrase bytes. ``client_cert_source``
+ and ``client_encrypted_cert_source`` are mutually exclusive.
+ quota_project_id (Optional[str]): A project name that a client's
+ quota belongs to.
+ credentials_file (Optional[str]): A path to a file storing credentials.
+ scopes (Optional[Sequence[str]]): OAuth access token override scopes.
+
+ Raises:
+ ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source``
+ are provided.
+ """
+
+ def __init__(
+ self,
+ api_endpoint=None,
+ client_cert_source=None,
+ client_encrypted_cert_source=None,
+ quota_project_id=None,
+ credentials_file=None,
+ scopes=None,
+ ):
+ if client_cert_source and client_encrypted_cert_source:
+ raise ValueError(
+ "client_cert_source and client_encrypted_cert_source are mutually exclusive"
+ )
+ self.api_endpoint = api_endpoint
+ self.client_cert_source = client_cert_source
+ self.client_encrypted_cert_source = client_encrypted_cert_source
+ self.quota_project_id = quota_project_id
+ self.credentials_file = credentials_file
+ self.scopes = scopes
+
+ def __repr__(self):
+ return "ClientOptions: " + repr(self.__dict__)
+
+
+def from_dict(options):
+ """Construct a client options object from a mapping object.
+
+ Args:
+ options (collections.abc.Mapping): A mapping object with client options.
+ See the docstring for ClientOptions for details on valid arguments.
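+
+    Example (a minimal sketch; the endpoint below is a placeholder)::
+
+        options = from_dict({"api_endpoint": "foo.googleapis.com"})
+        options.api_endpoint  # 'foo.googleapis.com'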
+ """
+
+ client_options = ClientOptions()
+
+ for key, value in options.items():
+ if hasattr(client_options, key):
+ setattr(client_options, key, value)
+ else:
+ raise ValueError("ClientOptions does not accept an option '" + key + "'")
+
+ return client_options
diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py
new file mode 100644
index 0000000..78268ef
--- /dev/null
+++ b/google/api_core/datetime_helpers.py
@@ -0,0 +1,298 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`datetime`."""
+
+import calendar
+import datetime
+import re
+
+from google.protobuf import timestamp_pb2
+
+
+_UTC_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
+_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
+# datetime.strptime cannot handle nanosecond precision: parse w/ regex
+_RFC3339_NANOS = re.compile(
+ r"""
+ (?P<no_fraction>
+ \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS
+ )
+ ( # Optional decimal part
+ \. # decimal point
+ (?P<nanos>\d{1,9}) # nanoseconds, maybe truncated
+ )?
+ Z # Zulu
+""",
+ re.VERBOSE,
+)
+
+
+def utcnow():
+ """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests."""
+ return datetime.datetime.utcnow()
+
+
+def to_milliseconds(value):
+ """Convert a zone-aware datetime to milliseconds since the unix epoch.
+
+ Args:
+        value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Milliseconds since the unix epoch.
+ """
+ micros = to_microseconds(value)
+ return micros // 1000
+
+
+def from_microseconds(value):
+ """Convert timestamp in microseconds since the unix epoch to datetime.
+
+ Args:
+ value (float): The timestamp to convert, in microseconds.
+
+ Returns:
+ datetime.datetime: The datetime object equivalent to the timestamp in
+ UTC.
+ """
+ return _UTC_EPOCH + datetime.timedelta(microseconds=value)
+
+
+def to_microseconds(value):
+ """Convert a datetime to microseconds since the unix epoch.
+
+ Args:
+        value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Microseconds since the unix epoch.
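+
+    Example::
+
+        # Naive datetimes are assumed to already be in UTC.
+        to_microseconds(datetime.datetime(1970, 1, 1, 0, 0, 1))
+        # => 1000000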
+ """
+ if not value.tzinfo:
+ value = value.replace(tzinfo=datetime.timezone.utc)
+ # Regardless of what timezone is on the value, convert it to UTC.
+ value = value.astimezone(datetime.timezone.utc)
+ # Convert the datetime to a microsecond timestamp.
+ return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
+
+
+def from_iso8601_date(value):
+    """Convert an ISO8601 date string to a date.
+
+ Args:
+ value (str): The ISO8601 date string.
+
+ Returns:
+ datetime.date: A date equivalent to the date string.
+ """
+ return datetime.datetime.strptime(value, "%Y-%m-%d").date()
+
+
+def from_iso8601_time(value):
+ """Convert a zoneless ISO8601 time string to a time.
+
+ Args:
+ value (str): The ISO8601 time string.
+
+ Returns:
+ datetime.time: A time equivalent to the time string.
+ """
+ return datetime.datetime.strptime(value, "%H:%M:%S").time()
+
+
+def from_rfc3339(value):
+ """Convert an RFC3339-format timestamp to a native datetime.
+
+ Supported formats include those without fractional seconds, or with
+ any fraction up to nanosecond precision.
+
+ .. note::
+ Python datetimes do not support nanosecond precision; this function
+ therefore truncates such values to microseconds.
+
+ Args:
+ value (str): The RFC3339 string to convert.
+
+ Returns:
+ datetime.datetime: The datetime object equivalent to the timestamp
+ in UTC.
+
+ Raises:
+ ValueError: If the timestamp does not match the RFC3339
+ regular expression.
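+
+    Example::
+
+        # Nanosecond fractions are truncated to microsecond precision.
+        from_rfc3339("2021-01-01T12:00:00.123456789Z")
+        # => datetime.datetime(2021, 1, 1, 12, 0, 0, 123456,
+        #                      tzinfo=datetime.timezone.utc)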
+ """
+ with_nanos = _RFC3339_NANOS.match(value)
+
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: {!r}, does not match pattern: {!r}".format(
+ value, _RFC3339_NANOS.pattern
+ )
+ )
+
+ bare_seconds = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+
+ if fraction is None:
+ micros = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10 ** scale)
+ micros = nanos // 1000
+
+ return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc)
+
+
+from_rfc3339_nanos = from_rfc3339 # from_rfc3339_nanos method was deprecated.
+
+
+def to_rfc3339(value, ignore_zone=True):
+ """Convert a datetime to an RFC3339 timestamp string.
+
+ Args:
+ value (datetime.datetime):
+ The datetime object to be converted to a string.
+ ignore_zone (bool): If True, then the timezone (if any) of the
+ datetime object is ignored and the datetime is treated as UTC.
+
+ Returns:
+        str: The RFC3339 formatted string representing the datetime.
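+
+    Example::
+
+        to_rfc3339(datetime.datetime(2021, 1, 1, 12, 0))
+        # => '2021-01-01T12:00:00.000000Z'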
+ """
+ if not ignore_zone and value.tzinfo is not None:
+ # Convert to UTC and remove the time zone info.
+ value = value.replace(tzinfo=None) - value.utcoffset()
+
+ return value.strftime(_RFC3339_MICROS)
+
+
+class DatetimeWithNanoseconds(datetime.datetime):
+ """Track nanosecond in addition to normal datetime attrs.
+
+ Nanosecond can be passed only as a keyword argument.
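+
+    Example::
+
+        stamp = DatetimeWithNanoseconds(
+            2021, 1, 1, 12, 0, 0, nanosecond=123456789,
+            tzinfo=datetime.timezone.utc)
+        stamp.nanosecond   # 123456789
+        stamp.microsecond  # 123456 (truncated from the nanoseconds)
+        stamp.rfc3339()    # '2021-01-01T12:00:00.123456789Z'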
+ """
+
+ __slots__ = ("_nanosecond",)
+
+ # pylint: disable=arguments-differ
+ def __new__(cls, *args, **kw):
+ nanos = kw.pop("nanosecond", 0)
+ if nanos > 0:
+ if "microsecond" in kw:
+ raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
+ kw["microsecond"] = nanos // 1000
+ inst = datetime.datetime.__new__(cls, *args, **kw)
+ inst._nanosecond = nanos or 0
+ return inst
+
+ # pylint: disable=arguments-differ
+
+ @property
+ def nanosecond(self):
+ """Read-only: nanosecond precision."""
+ return self._nanosecond
+
+ def rfc3339(self):
+ """Return an RFC3339-compliant timestamp.
+
+ Returns:
+ (str): Timestamp string according to RFC3339 spec.
+ """
+ if self._nanosecond == 0:
+ return to_rfc3339(self)
+ nanos = str(self._nanosecond).rjust(9, "0").rstrip("0")
+ return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos)
+
+ @classmethod
+ def from_rfc3339(cls, stamp):
+ """Parse RFC3339-compliant timestamp, preserving nanoseconds.
+
+ Args:
+ stamp (str): RFC3339 stamp, with up to nanosecond precision
+
+ Returns:
+ :class:`DatetimeWithNanoseconds`:
+ an instance matching the timestamp string
+
+ Raises:
+ ValueError: if `stamp` does not match the expected format
+ """
+ with_nanos = _RFC3339_NANOS.match(stamp)
+ if with_nanos is None:
+ raise ValueError(
+ "Timestamp: {}, does not match pattern: {}".format(
+ stamp, _RFC3339_NANOS.pattern
+ )
+ )
+ bare = datetime.datetime.strptime(
+ with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+ )
+ fraction = with_nanos.group("nanos")
+ if fraction is None:
+ nanos = 0
+ else:
+ scale = 9 - len(fraction)
+ nanos = int(fraction) * (10 ** scale)
+ return cls(
+ bare.year,
+ bare.month,
+ bare.day,
+ bare.hour,
+ bare.minute,
+ bare.second,
+ nanosecond=nanos,
+ tzinfo=datetime.timezone.utc,
+ )
+
+ def timestamp_pb(self):
+ """Return a timestamp message.
+
+ Returns:
+ (:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
+ """
+ inst = (
+ self
+ if self.tzinfo is not None
+ else self.replace(tzinfo=datetime.timezone.utc)
+ )
+ delta = inst - _UTC_EPOCH
+ seconds = int(delta.total_seconds())
+ nanos = self._nanosecond or self.microsecond * 1000
+ return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+
+ @classmethod
+ def from_timestamp_pb(cls, stamp):
+ """Parse RFC3339-compliant timestamp, preserving nanoseconds.
+
+ Args:
+ stamp (:class:`~google.protobuf.timestamp_pb2.Timestamp`): timestamp message
+
+ Returns:
+ :class:`DatetimeWithNanoseconds`:
+ an instance matching the timestamp message
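+
+        Example::
+
+            from google.protobuf import timestamp_pb2
+
+            pb = timestamp_pb2.Timestamp(seconds=1, nanos=500)
+            DatetimeWithNanoseconds.from_timestamp_pb(pb).nanosecond  # 500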
+ """
+ microseconds = int(stamp.seconds * 1e6)
+ bare = from_microseconds(microseconds)
+ return cls(
+ bare.year,
+ bare.month,
+ bare.day,
+ bare.hour,
+ bare.minute,
+ bare.second,
+ nanosecond=stamp.nanos,
+ tzinfo=datetime.timezone.utc,
+ )
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
new file mode 100644
index 0000000..6b1b6f7
--- /dev/null
+++ b/google/api_core/exceptions.py
@@ -0,0 +1,546 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions raised by Google API core & clients.
+
+This module provides base classes for all errors raised by libraries based
+on :mod:`google.api_core`, including both HTTP and gRPC clients.
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import http.client
+from typing import Dict
+from typing import Union
+
+from google.rpc import error_details_pb2
+
+try:
+ import grpc
+ from grpc_status import rpc_status
+except ImportError: # pragma: NO COVER
+ grpc = None
+ rpc_status = None
+
+# Lookup tables for mapping exceptions from HTTP and gRPC transports.
+# Populated by _GoogleAPICallErrorMeta
+_HTTP_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+_GRPC_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+
+# Additional lookup table to map integer status codes to grpc status code
+# grpc does not currently support initializing enums from ints
+# i.e., grpc.StatusCode(5) raises an error
+_INT_TO_GRPC_CODE = {}
+if grpc is not None: # pragma: no branch
+ for x in grpc.StatusCode:
+ _INT_TO_GRPC_CODE[x.value[0]] = x
+
+
+class GoogleAPIError(Exception):
+ """Base class for all exceptions raised by Google API Clients."""
+
+ pass
+
+
+class DuplicateCredentialArgs(GoogleAPIError):
+ """Raised when multiple credentials are passed."""
+
+ pass
+
+
+class RetryError(GoogleAPIError):
+ """Raised when a function has exhausted all of its available retries.
+
+ Args:
+ message (str): The exception message.
+        cause (Exception): The last exception raised when retrying the
+ function.
+ """
+
+ def __init__(self, message, cause):
+ super(RetryError, self).__init__(message)
+ self.message = message
+ self._cause = cause
+
+ @property
+ def cause(self):
+ """The last exception raised when retrying the function."""
+ return self._cause
+
+ def __str__(self):
+ return "{}, last exception: {}".format(self.message, self.cause)
+
+
+class _GoogleAPICallErrorMeta(type):
+ """Metaclass for registering GoogleAPICallError subclasses."""
+
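+    # Each subclass that defines ``code`` and/or ``grpc_status_code`` is
+    # recorded in the module-level lookup tables above, which is how e.g.
+    # ``from_http_status(404, ...)`` resolves to :class:`NotFound`.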
+ def __new__(mcs, name, bases, class_dict):
+ cls = type.__new__(mcs, name, bases, class_dict)
+ if cls.code is not None:
+ _HTTP_CODE_TO_EXCEPTION.setdefault(cls.code, cls)
+ if cls.grpc_status_code is not None:
+ _GRPC_CODE_TO_EXCEPTION.setdefault(cls.grpc_status_code, cls)
+ return cls
+
+
+class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta):
+ """Base class for exceptions raised by calling API methods.
+
+ Args:
+ message (str): The exception message.
+ errors (Sequence[Any]): An optional list of error details.
+ details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details.
+ response (Union[requests.Request, grpc.Call]): The response or
+ gRPC call metadata.
+ """
+
+ code: Union[int, None] = None
+ """Optional[int]: The HTTP status code associated with this error.
+
+ This may be ``None`` if the exception does not have a direct mapping
+ to an HTTP error.
+
+ See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+ """
+
+ grpc_status_code = None
+ """Optional[grpc.StatusCode]: The gRPC status code associated with this
+ error.
+
+ This may be ``None`` if the exception does not match up to a gRPC error.
+ """
+
+ def __init__(self, message, errors=(), details=(), response=None):
+ super(GoogleAPICallError, self).__init__(message)
+ self.message = message
+ """str: The exception message."""
+ self._errors = errors
+ self._details = details
+ self._response = response
+
+ def __str__(self):
+ if self.details:
+ return "{} {} {}".format(self.code, self.message, self.details)
+ else:
+ return "{} {}".format(self.code, self.message)
+
+ @property
+ def errors(self):
+ """Detailed error information.
+
+ Returns:
+ Sequence[Any]: A list of additional error details.
+ """
+ return list(self._errors)
+
+ @property
+ def details(self):
+ """Information contained in google.rpc.status.details.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto
+
+ Returns:
+ Sequence[Any]: A list of structured objects from error_details.proto
+ """
+ return list(self._details)
+
+ @property
+ def response(self):
+ """Optional[Union[requests.Request, grpc.Call]]: The response or
+ gRPC call metadata."""
+ return self._response
+
+
+class Redirection(GoogleAPICallError):
+    """Base class for all redirection (HTTP 3xx) responses."""
+
+
+class MovedPermanently(Redirection):
+ """Exception mapping a ``301 Moved Permanently`` response."""
+
+ code = http.client.MOVED_PERMANENTLY
+
+
+class NotModified(Redirection):
+ """Exception mapping a ``304 Not Modified`` response."""
+
+ code = http.client.NOT_MODIFIED
+
+
+class TemporaryRedirect(Redirection):
+ """Exception mapping a ``307 Temporary Redirect`` response."""
+
+ code = http.client.TEMPORARY_REDIRECT
+
+
+class ResumeIncomplete(Redirection):
+ """Exception mapping a ``308 Resume Incomplete`` response.
+
+ .. note:: :attr:`http.client.PERMANENT_REDIRECT` is ``308``, but Google
+ APIs differ in their use of this status code.
+ """
+
+ code = 308
+
+
+class ClientError(GoogleAPICallError):
+ """Base class for all client error (HTTP 4xx) responses."""
+
+
+class BadRequest(ClientError):
+ """Exception mapping a ``400 Bad Request`` response."""
+
+ code = http.client.BAD_REQUEST
+
+
+class InvalidArgument(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.INVALID_ARGUMENT` error."""
+
+ grpc_status_code = grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None
+
+
+class FailedPrecondition(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.FAILED_PRECONDITION`
+ error."""
+
+ grpc_status_code = grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None
+
+
+class OutOfRange(BadRequest):
+ """Exception mapping a :attr:`grpc.StatusCode.OUT_OF_RANGE` error."""
+
+ grpc_status_code = grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None
+
+
+class Unauthorized(ClientError):
+ """Exception mapping a ``401 Unauthorized`` response."""
+
+ code = http.client.UNAUTHORIZED
+
+
+class Unauthenticated(Unauthorized):
+ """Exception mapping a :attr:`grpc.StatusCode.UNAUTHENTICATED` error."""
+
+ grpc_status_code = grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None
+
+
+class Forbidden(ClientError):
+ """Exception mapping a ``403 Forbidden`` response."""
+
+ code = http.client.FORBIDDEN
+
+
+class PermissionDenied(Forbidden):
+ """Exception mapping a :attr:`grpc.StatusCode.PERMISSION_DENIED` error."""
+
+ grpc_status_code = grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None
+
+
+class NotFound(ClientError):
+ """Exception mapping a ``404 Not Found`` response or a
+ :attr:`grpc.StatusCode.NOT_FOUND` error."""
+
+ code = http.client.NOT_FOUND
+ grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
+
+
+class MethodNotAllowed(ClientError):
+ """Exception mapping a ``405 Method Not Allowed`` response."""
+
+ code = http.client.METHOD_NOT_ALLOWED
+
+
+class Conflict(ClientError):
+ """Exception mapping a ``409 Conflict`` response."""
+
+ code = http.client.CONFLICT
+
+
+class AlreadyExists(Conflict):
+ """Exception mapping a :attr:`grpc.StatusCode.ALREADY_EXISTS` error."""
+
+ grpc_status_code = grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None
+
+
+class Aborted(Conflict):
+ """Exception mapping a :attr:`grpc.StatusCode.ABORTED` error."""
+
+ grpc_status_code = grpc.StatusCode.ABORTED if grpc is not None else None
+
+
+class LengthRequired(ClientError):
+ """Exception mapping a ``411 Length Required`` response."""
+
+ code = http.client.LENGTH_REQUIRED
+
+
+class PreconditionFailed(ClientError):
+ """Exception mapping a ``412 Precondition Failed`` response."""
+
+ code = http.client.PRECONDITION_FAILED
+
+
+class RequestRangeNotSatisfiable(ClientError):
+ """Exception mapping a ``416 Request Range Not Satisfiable`` response."""
+
+ code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE
+
+
+class TooManyRequests(ClientError):
+ """Exception mapping a ``429 Too Many Requests`` response."""
+
+ code = http.client.TOO_MANY_REQUESTS
+
+
+class ResourceExhausted(TooManyRequests):
+ """Exception mapping a :attr:`grpc.StatusCode.RESOURCE_EXHAUSTED` error."""
+
+ grpc_status_code = grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None
+
+
+class Cancelled(ClientError):
+ """Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
+
+ # This maps to HTTP status code 499. See
+ # https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
+ code = 499
+ grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None
+
+
+class ServerError(GoogleAPICallError):
+ """Base for 5xx responses."""
+
+
+class InternalServerError(ServerError):
+    """Exception mapping a ``500 Internal Server Error`` response or a
+ :attr:`grpc.StatusCode.INTERNAL` error."""
+
+ code = http.client.INTERNAL_SERVER_ERROR
+ grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
+
+
+class Unknown(ServerError):
+ """Exception mapping a :attr:`grpc.StatusCode.UNKNOWN` error."""
+
+ grpc_status_code = grpc.StatusCode.UNKNOWN if grpc is not None else None
+
+
+class DataLoss(ServerError):
+ """Exception mapping a :attr:`grpc.StatusCode.DATA_LOSS` error."""
+
+ grpc_status_code = grpc.StatusCode.DATA_LOSS if grpc is not None else None
+
+
+class MethodNotImplemented(ServerError):
+ """Exception mapping a ``501 Not Implemented`` response or a
+ :attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
+
+ code = http.client.NOT_IMPLEMENTED
+ grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
+
+
+class BadGateway(ServerError):
+ """Exception mapping a ``502 Bad Gateway`` response."""
+
+ code = http.client.BAD_GATEWAY
+
+
+class ServiceUnavailable(ServerError):
+ """Exception mapping a ``503 Service Unavailable`` response or a
+ :attr:`grpc.StatusCode.UNAVAILABLE` error."""
+
+ code = http.client.SERVICE_UNAVAILABLE
+ grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
+
+
+class GatewayTimeout(ServerError):
+ """Exception mapping a ``504 Gateway Timeout`` response."""
+
+ code = http.client.GATEWAY_TIMEOUT
+
+
+class DeadlineExceeded(GatewayTimeout):
+ """Exception mapping a :attr:`grpc.StatusCode.DEADLINE_EXCEEDED` error."""
+
+ grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
+
+
+def exception_class_for_http_status(status_code):
+ """Return the exception class for a specific HTTP status code.
+
+ Args:
+ status_code (int): The HTTP status code.
+
+ Returns:
+ :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
+ """
+ return _HTTP_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
+
+
+def from_http_status(status_code, message, **kwargs):
+ """Create a :class:`GoogleAPICallError` from an HTTP status code.
+
+ Args:
+ status_code (int): The HTTP status code.
+ message (str): The exception message.
+ kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
+ constructor.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
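+
+    Example::
+
+        exc = from_http_status(404, "resource missing")
+        isinstance(exc, NotFound)  # True
+        exc.code                   # 404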
+ """
+ error_class = exception_class_for_http_status(status_code)
+ error = error_class(message, **kwargs)
+
+ if error.code is None:
+ error.code = status_code
+
+ return error
+
+
+def from_http_response(response):
+ """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.
+
+ Args:
+ response (requests.Response): The HTTP response.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`, with the message and errors populated
+ from the response.
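+
+    Example (sketch; ``response`` stands for a hypothetical failed
+    ``requests.Response``)::
+
+        exc = from_http_response(response)
+        # exc.message takes the form '<METHOD> <URL>: <payload error message>'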
+ """
+ try:
+ payload = response.json()
+ except ValueError:
+ payload = {"error": {"message": response.text or "unknown error"}}
+
+ error_message = payload.get("error", {}).get("message", "unknown error")
+ errors = payload.get("error", {}).get("errors", ())
+ # In JSON, details are already formatted in developer-friendly way.
+ details = payload.get("error", {}).get("details", ())
+
+ message = "{method} {url}: {error}".format(
+ method=response.request.method, url=response.request.url, error=error_message
+ )
+
+ exception = from_http_status(
+ response.status_code, message, errors=errors, details=details, response=response
+ )
+ return exception
+
+
+def exception_class_for_grpc_status(status_code):
+ """Return the exception class for a specific :class:`grpc.StatusCode`.
+
+ Args:
+ status_code (grpc.StatusCode): The gRPC status code.
+
+ Returns:
+ :func:`type`: the appropriate subclass of :class:`GoogleAPICallError`.
+ """
+ return _GRPC_CODE_TO_EXCEPTION.get(status_code, GoogleAPICallError)
+
+
+def from_grpc_status(status_code, message, **kwargs):
+ """Create a :class:`GoogleAPICallError` from a :class:`grpc.StatusCode`.
+
+ Args:
+ status_code (Union[grpc.StatusCode, int]): The gRPC status code.
+ message (str): The exception message.
+ kwargs: Additional arguments passed to the :class:`GoogleAPICallError`
+ constructor.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
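+
+    Example::
+
+        # Integer codes are translated through _INT_TO_GRPC_CODE, so 5
+        # resolves to grpc.StatusCode.NOT_FOUND.
+        exc = from_grpc_status(5, "resource missing")
+        isinstance(exc, NotFound)  # True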
+ """
+
+ if isinstance(status_code, int):
+ status_code = _INT_TO_GRPC_CODE.get(status_code, status_code)
+
+ error_class = exception_class_for_grpc_status(status_code)
+ error = error_class(message, **kwargs)
+
+ if error.grpc_status_code is None:
+ error.grpc_status_code = status_code
+
+ return error
+
+
+def _is_informative_grpc_error(rpc_exc):
+ return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details")
+
+
+def _parse_grpc_error_details(rpc_exc):
+ try:
+ status = rpc_status.from_call(rpc_exc)
+ except NotImplementedError: # workaround
+ return []
+
+ if not status:
+ return []
+
+ possible_errors = [
+ error_details_pb2.BadRequest,
+ error_details_pb2.PreconditionFailure,
+ error_details_pb2.QuotaFailure,
+ error_details_pb2.ErrorInfo,
+ error_details_pb2.RetryInfo,
+ error_details_pb2.ResourceInfo,
+ error_details_pb2.RequestInfo,
+ error_details_pb2.DebugInfo,
+ error_details_pb2.Help,
+ error_details_pb2.LocalizedMessage,
+ ]
+ error_details = []
+ for detail in status.details:
+ matched_detail_cls = list(
+ filter(lambda x: detail.Is(x.DESCRIPTOR), possible_errors)
+ )
+ # If nothing matched, use detail directly.
+ if len(matched_detail_cls) == 0:
+ info = detail
+ else:
+ info = matched_detail_cls[0]()
+ detail.Unpack(info)
+ error_details.append(info)
+ return error_details
+
+
+def from_grpc_error(rpc_exc):
+ """Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
+
+ Args:
+ rpc_exc (grpc.RpcError): The gRPC error.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`.
+ """
+    # NOTE(lidiz) All gRPC errors share the parent class grpc.RpcError.
+    # However, checking for grpc.RpcError would break backward compatibility.
+ if isinstance(rpc_exc, grpc.Call) or _is_informative_grpc_error(rpc_exc):
+ return from_grpc_status(
+ rpc_exc.code(),
+ rpc_exc.details(),
+ errors=(rpc_exc,),
+ details=_parse_grpc_error_details(rpc_exc),
+ response=rpc_exc,
+ )
+ else:
+ return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
diff --git a/google/api_core/future/__init__.py b/google/api_core/future/__init__.py
new file mode 100644
index 0000000..3768b2c
--- /dev/null
+++ b/google/api_core/future/__init__.py
@@ -0,0 +1,19 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for dealing with asynchronous operations."""
+
+from google.api_core.future.base import Future
+
+__all__ = ["Future"]
diff --git a/google/api_core/future/_helpers.py b/google/api_core/future/_helpers.py
new file mode 100644
index 0000000..9e88ca9
--- /dev/null
+++ b/google/api_core/future/_helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Private helpers for futures."""
+
+import logging
+import threading
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def start_daemon_thread(*args, **kwargs):
+ """Starts a thread and marks it as a daemon thread."""
+ thread = threading.Thread(*args, **kwargs)
+ thread.daemon = True
+ thread.start()
+ return thread
+
+
+def safe_invoke_callback(callback, *args, **kwargs):
+ """Invoke a callback, swallowing and logging any exceptions."""
+ # pylint: disable=bare-except
+ # We intentionally want to swallow all exceptions.
+ try:
+ return callback(*args, **kwargs)
+ except Exception:
+ _LOGGER.exception("Error while executing Future callback.")
diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py
new file mode 100644
index 0000000..88c183f
--- /dev/null
+++ b/google/api_core/future/async_future.py
@@ -0,0 +1,162 @@
+# Copyright 2020, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO implementation of the abstract base Future class."""
+
+import asyncio
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import retry_async
+from google.api_core.future import base
+
+
+class _OperationNotComplete(Exception):
+ """Private exception used for polling via retry."""
+
+ pass
+
+
+RETRY_PREDICATE = retry.if_exception_type(
+ _OperationNotComplete,
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+)
+DEFAULT_RETRY = retry_async.AsyncRetry(predicate=RETRY_PREDICATE)
+
+
+class AsyncFuture(base.Future):
+ """A Future that polls peer service to self-update.
+
+ The :meth:`done` method should be implemented by subclasses. The polling
+ behavior will repeatedly call ``done`` until it returns True.
+
+ .. note::
+
+ Privacy here is intended to keep the final class from over-exposing
+ its API, not to prevent subclasses from accessing methods.
+
+ Args:
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. The retry's ``deadline`` is superseded by the ``timeout``
+ argument passed to :meth:`result`.
+ """
+
+ def __init__(self, retry=DEFAULT_RETRY):
+ super().__init__()
+ self._retry = retry
+ self._future = asyncio.get_event_loop().create_future()
+ self._background_task = None
+
+ async def done(self, retry=DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ raise NotImplementedError()
+
+ async def _done_or_raise(self):
+ """Check if the future is done and raise if it's not."""
+ result = await self.done()
+ if not result:
+ raise _OperationNotComplete()
+
+ async def running(self):
+ """True if the operation is currently running."""
+ result = await self.done()
+ return not result
+
+ async def _blocking_poll(self, timeout=None):
+ """Poll and await for the Future to be resolved.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+ """
+ if self._future.done():
+ return
+
+ retry_ = self._retry.with_deadline(timeout)
+
+ try:
+ await retry_(self._done_or_raise)()
+ except exceptions.RetryError:
+ raise asyncio.TimeoutError(
+ "Operation did not complete within the designated " "timeout."
+ )
+
+ async def result(self, timeout=None):
+ """Get the result of the operation.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ google.protobuf.Message: The Operation's result.
+
+ Raises:
+ google.api_core.GoogleAPICallError: If the operation errors or if
+ the timeout is reached before the operation completes.
+ """
+ await self._blocking_poll(timeout=timeout)
+ return self._future.result()
+
+ async def exception(self, timeout=None):
+ """Get the exception from the operation.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.api_core.GoogleAPICallError]: The operation's
+ error.
+ """
+ await self._blocking_poll(timeout=timeout)
+ return self._future.exception()
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is completed, the callback will be scheduled onto the
+ event loop. Otherwise, the callback will be stored and invoked when the
+ future is done.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
+ """
+ if self._background_task is None:
+ self._background_task = asyncio.get_event_loop().create_task(
+ self._blocking_poll()
+ )
+ self._future.add_done_callback(fn)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._future.set_result(result)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._future.set_exception(exception)
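An illustrative subclass showing the polling contract (the class name and the fixed poll count are invented for this sketch; a real subclass would query a remote operation inside ``done``)::

    import asyncio

    from google.api_core.future import async_future

    class CountdownFuture(async_future.AsyncFuture):
        """Reports done after a fixed number of polls."""

        def __init__(self, polls_needed=2):
            super().__init__()
            self._polls_left = polls_needed

        async def done(self, retry=async_future.DEFAULT_RETRY):
            self._polls_left -= 1
            if self._polls_left <= 0:
                self.set_result("finished")
                return True
            return False

    async def main():
        future = CountdownFuture()
        # result() drives _blocking_poll, which retries done() until True.
        print(await future.result())  # finished

    asyncio.run(main())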
diff --git a/google/api_core/future/base.py b/google/api_core/future/base.py
new file mode 100644
index 0000000..f300586
--- /dev/null
+++ b/google/api_core/future/base.py
@@ -0,0 +1,64 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstract and helper bases for Future implementations."""
+
+import abc
+
+
+class Future(object, metaclass=abc.ABCMeta):
+ # pylint: disable=missing-docstring
+ # We inherit the interfaces here from concurrent.futures.
+
+ """Future interface.
+
+ This interface is based on :class:`concurrent.futures.Future`.
+ """
+
+ @abc.abstractmethod
+ def cancel(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancelled(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def done(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ # pylint: disable=invalid-name
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_result(self, result):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_exception(self, exception):
+ raise NotImplementedError()
diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py
new file mode 100644
index 0000000..02e680f
--- /dev/null
+++ b/google/api_core/future/polling.py
@@ -0,0 +1,193 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstract and helper bases for Future implementations."""
+
+import abc
+import concurrent.futures
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core.future import _helpers
+from google.api_core.future import base
+
+
+class _OperationNotComplete(Exception):
+ """Private exception used for polling via retry."""
+
+ pass
+
+
+RETRY_PREDICATE = retry.if_exception_type(
+ _OperationNotComplete,
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+ exceptions.ServiceUnavailable,
+)
+DEFAULT_RETRY = retry.Retry(predicate=RETRY_PREDICATE)
+
+
+class PollingFuture(base.Future):
+ """A Future that needs to poll some service to check its status.
+
+ The :meth:`done` method should be implemented by subclasses. The polling
+ behavior will repeatedly call ``done`` until it returns True.
+
+ .. note::
+
+ Privacy here is intended to keep the final class from over-exposing
+ its API, not to prevent subclasses from accessing methods.
+
+ Args:
+ retry (google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. The retry's ``deadline`` is superseded by the ``timeout``
+ argument passed to :meth:`result`.
+ """
+
+ def __init__(self, retry=DEFAULT_RETRY):
+ super(PollingFuture, self).__init__()
+ self._retry = retry
+ self._result = None
+ self._exception = None
+ self._result_set = False
+ """bool: Set to True when the result has been set via set_result or
+ set_exception."""
+ self._polling_thread = None
+ self._done_callbacks = []
+
+ @abc.abstractmethod
+ def done(self, retry=DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ raise NotImplementedError()
+
+ def _done_or_raise(self, retry=DEFAULT_RETRY):
+ """Check if the future is done and raise if it's not."""
+ kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
+
+ if not self.done(**kwargs):
+ raise _OperationNotComplete()
+
+ def running(self):
+ """True if the operation is currently running."""
+ return not self.done()
+
+ def _blocking_poll(self, timeout=None, retry=DEFAULT_RETRY):
+ """Poll and wait for the Future to be resolved.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+ """
+ if self._result_set:
+ return
+
+ retry_ = self._retry.with_deadline(timeout)
+
+ try:
+ kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
+ retry_(self._done_or_raise)(**kwargs)
+ except exceptions.RetryError:
+ raise concurrent.futures.TimeoutError(
+ "Operation did not complete within the designated " "timeout."
+ )
+
+ def result(self, timeout=None, retry=DEFAULT_RETRY):
+ """Get the result of the operation, blocking if necessary.
+
+ Args:
+ timeout (int):
+ How long (in seconds) to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ google.protobuf.Message: The Operation's result.
+
+ Raises:
+ google.api_core.GoogleAPICallError: If the operation errors or if
+ the timeout is reached before the operation completes.
+ """
+ kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
+ self._blocking_poll(timeout=timeout, **kwargs)
+
+ if self._exception is not None:
+ # pylint: disable=raising-bad-type
+ # Pylint doesn't recognize that this is valid in this case.
+ raise self._exception
+
+ return self._result
+
+ def exception(self, timeout=None):
+ """Get the exception from the operation, blocking if necessary.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.api_core.GoogleAPICallError]: The operation's
+ error.
+ """
+ self._blocking_poll(timeout=timeout)
+ return self._exception
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is not already complete, this will start a helper
+ thread to poll for the status of the operation in the background.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
+ """
+ if self._result_set:
+ _helpers.safe_invoke_callback(fn, self)
+ return
+
+ self._done_callbacks.append(fn)
+
+ if self._polling_thread is None:
+ # The polling thread will exit on its own as soon as the operation
+ # is done.
+ self._polling_thread = _helpers.start_daemon_thread(
+ target=self._blocking_poll
+ )
+
+ def _invoke_callbacks(self, *args, **kwargs):
+ """Invoke all done callbacks."""
+ for callback in self._done_callbacks:
+ _helpers.safe_invoke_callback(callback, *args, **kwargs)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._result = result
+ self._result_set = True
+ self._invoke_callbacks(self)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._exception = exception
+ self._result_set = True
+ self._invoke_callbacks(self)
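The blocking counterpart of the same contract, as a minimal sketch (names and the poll count are invented; a real subclass would check a remote operation's status in ``done``)::

    from google.api_core.future import polling

    class CountdownFuture(polling.PollingFuture):
        """Reports done after a fixed number of polls."""

        def __init__(self, polls_needed=2):
            super().__init__()
            self._polls_left = polls_needed

        def done(self, retry=polling.DEFAULT_RETRY):
            self._polls_left -= 1
            if self._polls_left <= 0:
                self.set_result("finished")
            return self._result_set

    future = CountdownFuture()
    # Blocks while the default retry re-invokes done(), then returns.
    print(future.result())  # finished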
diff --git a/google/api_core/gapic_v1/__init__.py b/google/api_core/gapic_v1/__init__.py
new file mode 100644
index 0000000..e5b7ad3
--- /dev/null
+++ b/google/api_core/gapic_v1/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.api_core.gapic_v1 import client_info
+from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1 import config_async
+from google.api_core.gapic_v1 import method
+from google.api_core.gapic_v1 import method_async
+from google.api_core.gapic_v1 import routing_header
+
+__all__ = [
+ "client_info",
+ "config",
+ "config_async",
+ "method",
+ "method_async",
+ "routing_header",
+]
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
new file mode 100644
index 0000000..fab0f54
--- /dev/null
+++ b/google/api_core/gapic_v1/client_info.py
@@ -0,0 +1,55 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for providing client information.
+
+Client information is used to send information about the calling client,
+such as the library and Python version, to API services.
+"""
+
+from google.api_core import client_info
+
+
+METRICS_METADATA_KEY = "x-goog-api-client"
+
+
+class ClientInfo(client_info.ClientInfo):
+ """Client information used to generate a user-agent for API calls.
+
+ This user-agent information is sent along with API calls to allow the
+ receiving service to do analytics on which versions of Python and Google
+ libraries are being used.
+
+ Args:
+ python_version (str): The Python interpreter version, for example,
+ ``'3.9.6'``.
+ grpc_version (Optional[str]): The gRPC library version.
+ api_core_version (str): The google-api-core library version.
+ gapic_version (Optional[str]): The version of the gapic-generated client
+ library, if the library was generated by gapic.
+ client_library_version (Optional[str]): The version of the client
+ library, generally used if the client library was not generated
+ by gapic or if additional functionality was built on top of
+ a gapic client library.
+ user_agent (Optional[str]): Prefix to the user agent header. This is
+ used to supply information such as application name or partner tool.
+ Recommended format: ``application-or-tool-ID/major.minor.version``.
+ """
+
+ def to_grpc_metadata(self):
+ """Returns the gRPC metadata for this client info."""
+ return (METRICS_METADATA_KEY, self.to_user_agent())
+
+
+DEFAULT_CLIENT_INFO = ClientInfo()
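A quick usage sketch (the user-agent prefix is an invented example)::

    from google.api_core.gapic_v1 import client_info

    info = client_info.ClientInfo(user_agent="my-sample-app/1.2.3")

    # The metadata tuple below is what gets attached to outgoing gRPC calls.
    key, value = info.to_grpc_metadata()
    print(key)    # x-goog-api-client
    print(value)  # my-sample-app/1.2.3 gl-python/... grpc/... gax/...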
diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py
new file mode 100644
index 0000000..9c72287
--- /dev/null
+++ b/google/api_core/gapic_v1/config.py
@@ -0,0 +1,166 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for loading gapic configuration data.
+
+The Google API generator creates supplementary configuration for each RPC
+method to tell the client library how to deal with retries and timeouts.
+"""
+
+import collections
+
+import grpc
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import timeout
+
+
+_MILLIS_PER_SECOND = 1000.0
+
+
+def _exception_class_for_grpc_status_name(name):
+ """Returns the Google API exception class for a gRPC error code name.
+
+ Args:
+ name (str): The name of the gRPC status code, for example,
+ ``UNAVAILABLE``.
+
+ Returns:
+ :func:`type`: The appropriate subclass of
+ :class:`google.api_core.exceptions.GoogleAPICallError`.
+ """
+ return exceptions.exception_class_for_grpc_status(getattr(grpc.StatusCode, name))
+
+
+def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
+ """Creates a Retry object given a gapic retry configuration.
+
+ Args:
+ retry_params (dict): The retry parameter values, for example::
+
+ {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000
+ }
+
+ retry_codes (sequence[str]): The list of retryable gRPC error code
+ names.
+
+ Returns:
+ google.api_core.retry.Retry: The default retry object for the method.
+ """
+ exception_classes = [
+ _exception_class_for_grpc_status_name(code) for code in retry_codes
+ ]
+ return retry_impl(
+ retry.if_exception_type(*exception_classes),
+ initial=(retry_params["initial_retry_delay_millis"] / _MILLIS_PER_SECOND),
+ maximum=(retry_params["max_retry_delay_millis"] / _MILLIS_PER_SECOND),
+ multiplier=retry_params["retry_delay_multiplier"],
+ deadline=retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND,
+ )
+
+
+def _timeout_from_retry_config(retry_params):
+ """Creates a ExponentialTimeout object given a gapic retry configuration.
+
+ Args:
+ retry_params (dict): The retry parameter values, for example::
+
+ {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000
+ }
+
+ Returns:
+ google.api_core.timeout.ExponentialTimeout: The default timeout
+ object for the method.
+ """
+ return timeout.ExponentialTimeout(
+ initial=(retry_params["initial_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+ maximum=(retry_params["max_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+ multiplier=retry_params["rpc_timeout_multiplier"],
+ deadline=(retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND),
+ )
+
+
+MethodConfig = collections.namedtuple("MethodConfig", ["retry", "timeout"])
+
+
+def parse_method_configs(interface_config, retry_impl=retry.Retry):
+ """Creates default retry and timeout objects for each method in a gapic
+ interface config.
+
+ Args:
+ interface_config (Mapping): The interface config section of the full
+ gapic library config. For example, if the full configuration has
+ an interface named ``google.example.v1.ExampleService`` you would
+ pass in just that interface's configuration, for example
+ ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
+ retry_impl (Callable): The constructor that creates a retry decorator
+ that will be applied to the method based on method configs.
+
+ Returns:
+ Mapping[str, MethodConfig]: A mapping of RPC method names to their
+ configuration.
+ """
+ # Grab all the retry codes
+ retry_codes_map = {
+ name: retry_codes
+ for name, retry_codes in interface_config.get("retry_codes", {}).items()
+ }
+
+ # Grab all of the retry params
+ retry_params_map = {
+ name: retry_params
+ for name, retry_params in interface_config.get("retry_params", {}).items()
+ }
+
+ # Iterate through all the API methods and create a flat MethodConfig
+ # instance for each one.
+ method_configs = {}
+
+ for method_name, method_params in interface_config.get("methods", {}).items():
+ retry_params_name = method_params.get("retry_params_name")
+
+ if retry_params_name is not None:
+ retry_params = retry_params_map[retry_params_name]
+ retry_ = _retry_from_retry_config(
+ retry_params,
+ retry_codes_map[method_params["retry_codes_name"]],
+ retry_impl,
+ )
+ timeout_ = _timeout_from_retry_config(retry_params)
+
+ # No retry config, so this is a non-retryable method.
+ else:
+ retry_ = None
+ timeout_ = timeout.ConstantTimeout(
+ method_params["timeout_millis"] / _MILLIS_PER_SECOND
+ )
+
+ method_configs[method_name] = MethodConfig(retry=retry_, timeout=timeout_)
+
+ return method_configs
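A runnable sketch with a hand-written interface config (the method name and parameter values are illustrative, not taken from a real service)::

    from google.api_core.gapic_v1 import config

    interface_config = {
        "retry_codes": {"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"]},
        "retry_params": {
            "default": {
                "initial_retry_delay_millis": 100,
                "retry_delay_multiplier": 1.3,
                "max_retry_delay_millis": 60000,
                "initial_rpc_timeout_millis": 20000,
                "rpc_timeout_multiplier": 1.0,
                "max_rpc_timeout_millis": 20000,
                "total_timeout_millis": 600000,
            },
        },
        "methods": {
            "GetTopic": {
                "retry_codes_name": "idempotent",
                "retry_params_name": "default",
                "timeout_millis": 60000,
            },
        },
    }

    method_configs = config.parse_method_configs(interface_config)
    # Each entry pairs a Retry with an ExponentialTimeout, both in seconds.
    print(method_configs["GetTopic"].retry)
    print(method_configs["GetTopic"].timeout)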
diff --git a/google/api_core/gapic_v1/config_async.py b/google/api_core/gapic_v1/config_async.py
new file mode 100644
index 0000000..13d6a48
--- /dev/null
+++ b/google/api_core/gapic_v1/config_async.py
@@ -0,0 +1,42 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""AsyncIO helpers for loading gapic configuration data.
+
+The Google API generator creates supplementary configuration for each RPC
+method to tell the client library how to deal with retries and timeouts.
+"""
+
+from google.api_core import retry_async
+from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1.config import MethodConfig # noqa: F401
+
+
+def parse_method_configs(interface_config):
+ """Creates default retry and timeout objects for each method in a gapic
+ interface config with AsyncIO semantics.
+
+ Args:
+ interface_config (Mapping): The interface config section of the full
+ gapic library config. For example, if the full configuration has
+ an interface named ``google.example.v1.ExampleService`` you would
+ pass in just that interface's configuration, for example
+ ``gapic_config['interfaces']['google.example.v1.ExampleService']``.
+
+ Returns:
+ Mapping[str, MethodConfig]: A mapping of RPC method names to their
+ configuration.
+ """
+ return config.parse_method_configs(
+ interface_config, retry_impl=retry_async.AsyncRetry
+ )
diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py
new file mode 100644
index 0000000..73c8d4b
--- /dev/null
+++ b/google/api_core/gapic_v1/method.py
@@ -0,0 +1,253 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for wrapping low-level gRPC methods with common functionality.
+
+This is used by gapic clients to provide common error mapping, retry, timeout,
+pagination, and long-running operations to gRPC methods.
+"""
+
+import enum
+import functools
+
+from google.api_core import grpc_helpers
+from google.api_core import timeout
+from google.api_core.gapic_v1 import client_info
+
+USE_DEFAULT_METADATA = object()
+
+
+class _MethodDefault(enum.Enum):
+ # Uses enum so that pytype/mypy knows that this is the only possible value.
+ # https://stackoverflow.com/a/60605919/101923
+ #
+ # Literal[_DEFAULT_VALUE] is an alternative, but only added in Python 3.8.
+ # https://docs.python.org/3/library/typing.html#typing.Literal
+ _DEFAULT_VALUE = object()
+
+
+DEFAULT = _MethodDefault._DEFAULT_VALUE
+"""Sentinel value indicating that a retry or timeout argument was unspecified,
+so the default should be used."""
+
+
+def _is_not_none_or_false(value):
+ return value is not None and value is not False
+
+
+def _apply_decorators(func, decorators):
+ """Apply a list of decorators to a given function.
+
+ ``decorators`` may contain items that are ``None`` or ``False``, which will
+ be ignored.
+ """
+ decorators = filter(_is_not_none_or_false, reversed(decorators))
+
+ for decorator in decorators:
+ func = decorator(func)
+
+ return func
+
+
+def _determine_timeout(default_timeout, specified_timeout, retry):
+ """Determines how timeout should be applied to a wrapped method.
+
+ Args:
+ default_timeout (Optional[Timeout]): The default timeout specified
+ at method creation time.
+ specified_timeout (Optional[Timeout]): The timeout specified at
+ invocation time. If :attr:`DEFAULT`, this will be set to
+ the ``default_timeout``.
+ retry (Optional[Retry]): The retry specified at invocation time.
+
+ Returns:
+ Optional[Timeout]: The timeout to apply to the method or ``None``.
+ """
+ # If timeout is specified as a number instead of a Timeout instance,
+ # convert it to a ConstantTimeout.
+ if isinstance(specified_timeout, (int, float)):
+ specified_timeout = timeout.ConstantTimeout(specified_timeout)
+ if isinstance(default_timeout, (int, float)):
+ default_timeout = timeout.ConstantTimeout(default_timeout)
+
+ if specified_timeout is DEFAULT:
+ specified_timeout = default_timeout
+
+ if specified_timeout is default_timeout:
+ # If timeout is the default and the default timeout is exponential and
+ # a non-default retry is specified, make sure the timeout's deadline
+ # matches the retry's. This handles the case where the user leaves
+ # the timeout default but specifies a lower deadline via the retry.
+ if (
+ retry
+ and retry is not DEFAULT
+ and isinstance(default_timeout, timeout.ExponentialTimeout)
+ ):
+ return default_timeout.with_deadline(retry._deadline)
+ else:
+ return default_timeout
+
+ return specified_timeout
+
+
+class _GapicCallable(object):
+ """Callable that applies retry, timeout, and metadata logic.
+
+ Args:
+ target (Callable): The low-level RPC method.
+ retry (google.api_core.retry.Retry): The default retry for the
+ callable. If ``None``, this callable will not retry by default.
+ timeout (google.api_core.timeout.Timeout): The default timeout
+ for the callable. If ``None``, this callable will not specify
+ a timeout argument to the low-level RPC method by default.
+ metadata (Sequence[Tuple[str, str]]): Additional metadata that is
+ provided to the RPC method on every invocation. This is merged with
+ any metadata specified during invocation. If ``None``, no
+ additional metadata will be passed to the RPC method.
+ """
+
+ def __init__(self, target, retry, timeout, metadata=None):
+ self._target = target
+ self._retry = retry
+ self._timeout = timeout
+ self._metadata = metadata
+
+ def __call__(self, *args, timeout=DEFAULT, retry=DEFAULT, **kwargs):
+ """Invoke the low-level RPC with retry, timeout, and metadata."""
+ timeout = _determine_timeout(
+ self._timeout,
+ timeout,
+ # Use the invocation-specified retry only for this, as we only
+ # want to adjust the timeout deadline if the *user* specified
+ # a different retry.
+ retry,
+ )
+
+ if retry is DEFAULT:
+ retry = self._retry
+
+ # Apply all applicable decorators.
+ wrapped_func = _apply_decorators(self._target, [retry, timeout])
+
+ # Add the user agent metadata to the call.
+ if self._metadata is not None:
+ metadata = kwargs.get("metadata", [])
+ # Due to the nature of invocation, None should be treated the same
+ # as not specified.
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ metadata.extend(self._metadata)
+ kwargs["metadata"] = metadata
+
+ return wrapped_func(*args, **kwargs)
+
+
+def wrap_method(
+ func,
+ default_retry=None,
+ default_timeout=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+):
+ """Wrap an RPC method with common behavior.
+
+ This applies common error wrapping, retry, and timeout behavior to a function.
+ The wrapped function will take optional ``retry`` and ``timeout``
+ arguments.
+
+ For example::
+
+ import google.api_core.gapic_v1.method
+ from google.api_core import retry
+ from google.api_core import timeout
+
+ # The original RPC method.
+ def get_topic(name, timeout=None):
+ request = publisher_v2.GetTopicRequest(name=name)
+ return publisher_stub.GetTopic(request, timeout=timeout)
+
+ default_retry = retry.Retry(deadline=60)
+ default_timeout = timeout.Timeout(deadline=60)
+ wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method(
+ get_topic, default_retry)
+
+ # Execute get_topic with default retry and timeout:
+ response = wrapped_get_topic()
+
+ # Execute get_topic without doing any retrying but with the default
+ # timeout:
+ response = wrapped_get_topic(retry=None)
+
+ # Execute get_topic but only retry on 5xx errors:
+ my_retry = retry.Retry(retry.if_exception_type(
+ exceptions.InternalServerError))
+ response = wrapped_get_topic(retry=my_retry)
+
+ The way this works is by late-wrapping the given function with the retry
+ and timeout decorators. Essentially, when ``wrapped_get_topic()`` is
+ called:
+
+ * ``get_topic()`` is first wrapped with the ``timeout`` into
+ ``get_topic_with_timeout``.
+ * ``get_topic_with_timeout`` is wrapped with the ``retry`` into
+ ``get_topic_with_timeout_and_retry()``.
+ * The final ``get_topic_with_timeout_and_retry`` is called passing through
+ the ``args`` and ``kwargs``.
+
+ The callstack is therefore::
+
+ method.__call__() ->
+ Retry.__call__() ->
+ Timeout.__call__() ->
+ wrap_errors() ->
+ get_topic()
+
+ Note that if ``timeout`` or ``retry`` is ``None``, then they are not
+ applied to the function. For example,
+ ``wrapped_get_topic(timeout=None, retry=None)`` is more or less
+ equivalent to just calling ``get_topic`` but with error re-mapping.
+
+ Args:
+ func (Callable[Any]): The function to wrap. It should accept an
+ optional ``timeout`` argument. If ``metadata`` is not ``None``, it
+ should accept a ``metadata`` argument.
+ default_retry (Optional[google.api_core.Retry]): The default retry
+ strategy. If ``None``, the method will not retry by default.
+ default_timeout (Optional[google.api_core.Timeout]): The default
+ timeout strategy. Can also be specified as an int or float. If
+ ``None``, the method will not have timeout specified by default.
+ client_info
+ (Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
+ Client information used to create a user-agent string that's
+ passed as gRPC metadata to the method. If unspecified, then
+ a sane default will be used. If ``None``, then no user agent
+ metadata will be provided to the RPC method.
+
+ Returns:
+ Callable: A new callable that takes optional ``retry`` and ``timeout``
+ arguments and applies the common error mapping, retry, timeout,
+ and metadata behavior to the low-level RPC method.
+ """
+ func = grpc_helpers.wrap_errors(func)
+
+ if client_info is not None:
+ user_agent_metadata = [client_info.to_grpc_metadata()]
+ else:
+ user_agent_metadata = None
+
+ return functools.wraps(func)(
+ _GapicCallable(
+ func, default_retry, default_timeout, metadata=user_agent_metadata
+ )
+ )
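A self-contained sketch with a plain function standing in for a real gRPC stub (all names here are invented for illustration)::

    from google.api_core import retry as retries
    from google.api_core import timeout as timeouts
    from google.api_core.gapic_v1 import method

    def fake_rpc(request, timeout=None, metadata=None):
        # A real method would invoke a gRPC stub here.
        return "response to %r (timeout=%s)" % (request, timeout)

    wrapped = method.wrap_method(
        fake_rpc,
        default_retry=retries.Retry(deadline=30),
        default_timeout=timeouts.ConstantTimeout(10),
    )

    print(wrapped("req"))                         # defaults applied
    print(wrapped("req", retry=None, timeout=5))  # per-call overrides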
diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py
new file mode 100644
index 0000000..84c99aa
--- /dev/null
+++ b/google/api_core/gapic_v1/method_async.py
@@ -0,0 +1,48 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""AsyncIO helpers for wrapping gRPC methods with common functionality.
+
+This is used by gapic clients to provide common error mapping, retry, timeout,
+pagination, and long-running operations to gRPC methods.
+"""
+
+import functools
+
+from google.api_core import grpc_helpers_async
+from google.api_core.gapic_v1 import client_info
+from google.api_core.gapic_v1.method import _GapicCallable
+from google.api_core.gapic_v1.method import DEFAULT # noqa: F401
+from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401
+
+
+def wrap_method(
+ func,
+ default_retry=None,
+ default_timeout=None,
+ client_info=client_info.DEFAULT_CLIENT_INFO,
+):
+ """Wrap an async RPC method with common behavior.
+
+ Returns:
+ Callable: A new callable that takes optional ``retry`` and ``timeout``
+ arguments and applies the common error mapping, retry, timeout,
+ and metadata behavior to the low-level RPC method.
+ """
+ func = grpc_helpers_async.wrap_errors(func)
+
+ metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
+
+ return functools.wraps(func)(
+ _GapicCallable(func, default_retry, default_timeout, metadata=metadata)
+ )
diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py
new file mode 100644
index 0000000..a7bcb5a
--- /dev/null
+++ b/google/api_core/gapic_v1/routing_header.py
@@ -0,0 +1,57 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for constructing routing headers.
+
+These headers are used by Google infrastructure to determine how to route
+requests, especially for services that are regional.
+
+Generally, these headers are specified as gRPC metadata.
+"""
+
+from urllib.parse import urlencode
+
+ROUTING_METADATA_KEY = "x-goog-request-params"
+
+
+def to_routing_header(params):
+ """Returns a routing header string for the given request parameters.
+
+ Args:
+ params (Mapping[str, Any]): A dictionary containing the request
+ parameters used for routing.
+
+ Returns:
+ str: The routing header string.
+ """
+ return urlencode(
+ params,
+ # Per Google API policy (go/api-url-encoding), / is not encoded.
+ safe="/",
+ )
+
+
+def to_grpc_metadata(params):
+ """Returns the gRPC metadata containing the routing headers for the given
+ request parameters.
+
+ Args:
+ params (Mapping[str, Any]): A dictionary containing the request
+ parameters used for routing.
+
+ Returns:
+ Tuple(str, str): The gRPC metadata containing the routing header key
+ and value.
+ """
+ return (ROUTING_METADATA_KEY, to_routing_header(params))
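A quick sketch (the resource path is illustrative)::

    from google.api_core.gapic_v1 import routing_header

    params = {"topic": "projects/my-project/topics/my-topic"}

    # Slashes are deliberately left unescaped, per the policy noted above.
    print(routing_header.to_routing_header(params))
    # topic=projects/my-project/topics/my-topic

    print(routing_header.to_grpc_metadata(params))
    # ('x-goog-request-params', 'topic=projects/my-project/topics/my-topic')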
diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py
new file mode 100644
index 0000000..fba7802
--- /dev/null
+++ b/google/api_core/general_helpers.py
@@ -0,0 +1,16 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This import is for backward compatibility only.
+from functools import wraps # noqa: F401 pragma: NO COVER
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
new file mode 100644
index 0000000..594df98
--- /dev/null
+++ b/google/api_core/grpc_helpers.py
@@ -0,0 +1,495 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`grpc`."""
+
+import collections
+import functools
+
+import grpc
+import pkg_resources
+
+from google.api_core import exceptions
+import google.auth
+import google.auth.credentials
+import google.auth.transport.grpc
+import google.auth.transport.requests
+
+try:
+ import grpc_gcp
+
+ HAS_GRPC_GCP = True
+except ImportError:
+ HAS_GRPC_GCP = False
+
+try:
+ # google.auth.__version__ was added in 1.26.0
+ _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+ try: # try pkg_resources if it is available
+ _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+ except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GOOGLE_AUTH_VERSION = None
+
+# The list of gRPC Callable interfaces that return iterators.
+_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
+
+
+def _patch_callable_name(callable_):
+ """Fix-up gRPC callable attributes.
+
+ gRPC callables lack the ``__name__`` attribute, which causes
+ :func:`functools.wraps` to error. This adds the attribute if needed.
+ """
+ if not hasattr(callable_, "__name__"):
+ callable_.__name__ = callable_.__class__.__name__
+
+
+def _wrap_unary_errors(callable_):
+ """Map errors for Unary-Unary and Stream-Unary gRPC callables."""
+ _patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ try:
+ return callable_(*args, **kwargs)
+ except grpc.RpcError as exc:
+ raise exceptions.from_grpc_error(exc) from exc
+
+ return error_remapped_callable
+
+
+class _StreamingResponseIterator(grpc.Call):
+ def __init__(self, wrapped, prefetch_first_result=True):
+ self._wrapped = wrapped
+
+ # This iterator is used in a retry context and is returned to the caller
+ # after init. gRPC will not raise an exception until the stream is consumed,
+ # so we retrieve the first result up front to surface any error and trigger a retry.
+ try:
+ if prefetch_first_result:
+ self._stored_first_result = next(self._wrapped)
+ except TypeError:
+ # It is possible the wrapped method isn't an iterable (a grpc.Call
+ # for instance). If this happens, don't store the first result.
+ pass
+ except StopIteration:
+ # Ignore StopIteration at this time; it should be handled outside of retry.
+ pass
+
+ def __iter__(self):
+ """This iterator is also an iterable that returns itself."""
+ return self
+
+ def __next__(self):
+ """Get the next response from the stream.
+
+ Returns:
+ protobuf.Message: A single response from the stream.
+ """
+ try:
+ if hasattr(self, "_stored_first_result"):
+ result = self._stored_first_result
+ del self._stored_first_result
+ return result
+ return next(self._wrapped)
+ except grpc.RpcError as exc:
+ # If the stream has already returned data, we cannot recover here.
+ raise exceptions.from_grpc_error(exc) from exc
+
+ # grpc.Call & grpc.RpcContext interface
+
+ def add_callback(self, callback):
+ return self._wrapped.add_callback(callback)
+
+ def cancel(self):
+ return self._wrapped.cancel()
+
+ def code(self):
+ return self._wrapped.code()
+
+ def details(self):
+ return self._wrapped.details()
+
+ def initial_metadata(self):
+ return self._wrapped.initial_metadata()
+
+ def is_active(self):
+ return self._wrapped.is_active()
+
+ def time_remaining(self):
+ return self._wrapped.time_remaining()
+
+ def trailing_metadata(self):
+ return self._wrapped.trailing_metadata()
+
+
+def _wrap_stream_errors(callable_):
+ """Wrap errors for Unary-Stream and Stream-Stream gRPC callables.
+
+ The callables that return iterators require a bit more logic to re-map
+ errors when iterating. This wraps both the initial invocation and the
+ iterator of the return value to re-map errors.
+ """
+ _patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ try:
+ result = callable_(*args, **kwargs)
+ # Auto-fetching the first result causes PubSub client's streaming pull
+ # to hang when re-opening the stream, thus we need to examine the hacky
+ # hidden flag to see if pre-fetching is disabled.
+ # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257
+ prefetch_first = getattr(callable_, "_prefetch_first_result_", True)
+ return _StreamingResponseIterator(
+ result, prefetch_first_result=prefetch_first
+ )
+ except grpc.RpcError as exc:
+ raise exceptions.from_grpc_error(exc) from exc
+
+ return error_remapped_callable
+
+
+def wrap_errors(callable_):
+ """Wrap a gRPC callable and map :class:`grpc.RpcErrors` to friendly error
+ classes.
+
+ Errors raised by the gRPC callable are mapped to the appropriate
+ :class:`google.api_core.exceptions.GoogleAPICallError` subclasses.
+ The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ available from the ``response`` property on the mapped exception. This
+ is useful for extracting metadata from the original error.
+
+ Args:
+ callable_ (Callable): A gRPC callable.
+
+ Returns:
+ Callable: The wrapped gRPC callable.
+ """
+ if isinstance(callable_, _STREAM_WRAP_CLASSES):
+ return _wrap_stream_errors(callable_)
+ else:
+ return _wrap_unary_errors(callable_)
+
+
+def _create_composite_credentials(
+ credentials=None,
+ credentials_file=None,
+ default_scopes=None,
+ scopes=None,
+ ssl_credentials=None,
+ quota_project_id=None,
+ default_host=None,
+):
+ """Create the composite credentials for secure channels.
+
+ Args:
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ default_scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ quota_project_id (str): An optional project to use for billing and quota.
+ default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+
+ Returns:
+ grpc.ChannelCredentials: The composed channel credentials object.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ """
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials' and 'credentials_file' are mutually exclusive."
+ )
+
+ if credentials_file:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, default_scopes=default_scopes
+ )
+ elif credentials:
+ credentials = google.auth.credentials.with_scopes_if_required(
+ credentials, scopes=scopes, default_scopes=default_scopes
+ )
+ else:
+ credentials, _ = google.auth.default(
+ scopes=scopes, default_scopes=default_scopes
+ )
+
+ if quota_project_id and isinstance(
+ credentials, google.auth.credentials.CredentialsWithQuotaProject
+ ):
+ credentials = credentials.with_quota_project(quota_project_id)
+
+ request = google.auth.transport.requests.Request()
+
+ # Create the metadata plugin for inserting the authorization header.
+ metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
+ credentials, request, default_host=default_host,
+ )
+
+ # Create a set of grpc.CallCredentials using the metadata plugin.
+ google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
+
+ if ssl_credentials is None:
+ ssl_credentials = grpc.ssl_channel_credentials()
+
+ # Combine the ssl credentials and the authorization credentials.
+ return grpc.composite_channel_credentials(ssl_credentials, google_auth_credentials)
+
+
+def create_channel(
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=None,
+ default_host=None,
+ **kwargs
+):
+ """Create a secure channel with credentials.
+
+ Args:
+ target (str): The target service address in the format 'hostname:port'.
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+ scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ quota_project_id (str): An optional project to use for billing and quota.
+ default_scopes (Sequence[str]): Default scopes passed by a Google client
+ library. Use 'scopes' for user-defined scopes.
+ default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+ kwargs: Additional key-word args passed to
+ :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`.
+
+ Returns:
+ grpc.Channel: The created channel.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ """
+
+ composite_credentials = _create_composite_credentials(
+ credentials=credentials,
+ credentials_file=credentials_file,
+ default_scopes=default_scopes,
+ scopes=scopes,
+ ssl_credentials=ssl_credentials,
+ quota_project_id=quota_project_id,
+ default_host=default_host,
+ )
+
+ if HAS_GRPC_GCP:
+ # If grpc_gcp module is available use grpc_gcp.secure_channel,
+ # otherwise, use grpc.secure_channel to create grpc channel.
+ return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
+ else:
+ return grpc.secure_channel(target, composite_credentials, **kwargs)
+
+
+_MethodCall = collections.namedtuple(
+ "_MethodCall", ("request", "timeout", "metadata", "credentials")
+)
+
+_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
+
+
+class _CallableStub(object):
+ """Stub for the grpc.*MultiCallable interfaces."""
+
+ def __init__(self, method, channel):
+ self._method = method
+ self._channel = channel
+ self.response = None
+ """Union[protobuf.Message, Callable[protobuf.Message], exception]:
+ The response to give when invoking this callable. If this is a
+ callable, it will be invoked with the request protobuf. If it's an
+ exception, the exception will be raised when this is invoked.
+ """
+ self.responses = None
+ """Iterator[
+ Union[protobuf.Message, Callable[protobuf.Message], exception]]:
+ An iterator of responses. If specified, self.response will be populated
+ on each invocation by calling ``next(self.responses)``."""
+ self.requests = []
+ """List[protobuf.Message]: All requests sent to this callable."""
+ self.calls = []
+ """List[Tuple]: All invocations of this callable. Each tuple is the
+ request, timeout, metadata, and credentials."""
+
+ def __call__(self, request, timeout=None, metadata=None, credentials=None):
+ self._channel.requests.append(_ChannelRequest(self._method, request))
+ self.calls.append(_MethodCall(request, timeout, metadata, credentials))
+ self.requests.append(request)
+
+ response = self.response
+ if self.responses is not None:
+ if response is None:
+ response = next(self.responses)
+ else:
+ raise ValueError(
+ "{method}.response and {method}.responses are mutually "
+ "exclusive.".format(method=self._method)
+ )
+
+ if callable(response):
+ return response(request)
+
+ if isinstance(response, Exception):
+ raise response
+
+ if response is not None:
+ return response
+
+ raise ValueError('Method stub for "{}" has no response.'.format(self._method))
+
+
+def _simplify_method_name(method):
+ """Simplifies a gRPC method name.
+
+ When gRPC invokes the channel to create a callable, it gives a full
+ method name like "/google.pubsub.v1.Publisher/CreateTopic". This
+ returns just the name of the method, in this case "CreateTopic".
+
+ Args:
+ method (str): The name of the method.
+
+ Returns:
+ str: The simplified name of the method.
+ """
+ return method.rsplit("/", 1).pop()
+
+
+class ChannelStub(grpc.Channel):
+ """A testing stub for the grpc.Channel interface.
+
+ This can be used to test any client that eventually uses a gRPC channel
+ to communicate. By passing in a channel stub, you can configure which
+ responses are returned and track which requests are made.
+
+ For example:
+
+ .. code-block:: python
+
+ channel_stub = grpc_helpers.ChannelStub()
+ client = FooClient(channel=channel_stub)
+
+ channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
+
+ foo = client.get_foo(labels=['baz'])
+
+ assert foo.name == 'bar'
+ assert channel_stub.GetFoo.requests[0].labels == ['baz']
+
+ Each method on the stub can be accessed and configured on the channel.
+ Here's some examples of various configurations:
+
+ .. code-block:: python
+
+ # Return a basic response:
+
+ channel_stub.GetFoo.response = foo_pb2.Foo(name='bar')
+ assert client.get_foo().name == 'bar'
+
+ # Raise an exception:
+ channel_stub.GetFoo.response = NotFound('...')
+
+ with pytest.raises(NotFound):
+ client.get_foo()
+
+ # Use a sequence of responses:
+ channel_stub.GetFoo.responses = iter([
+ foo_pb2.Foo(name='bar'),
+ foo_pb2.Foo(name='baz'),
+ ])
+
+ assert client.get_foo().name == 'bar'
+ assert client.get_foo().name == 'baz'
+
+ # Use a callable
+
+ def on_get_foo(request):
+ return foo_pb2.Foo(name='bar' + request.id)
+
+ channel_stub.GetFoo.response = on_get_foo
+
+ assert client.get_foo(id='123').name == 'bar123'
+ """
+
+ def __init__(self, responses=[]):
+ self.requests = []
+ """Sequence[Tuple[str, protobuf.Message]]: A list of all requests made
+ on this channel in order. The tuple is of method name, request
+ message."""
+ self._method_stubs = {}
+
+ def _stub_for_method(self, method):
+ method = _simplify_method_name(method)
+ self._method_stubs[method] = _CallableStub(method, self)
+ return self._method_stubs[method]
+
+ def __getattr__(self, key):
+ try:
+ return self._method_stubs[key]
+ except KeyError:
+ raise AttributeError
+
+ def unary_unary(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.unary_unary implementation."""
+ return self._stub_for_method(method)
+
+ def unary_stream(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.unary_stream implementation."""
+ return self._stub_for_method(method)
+
+ def stream_unary(self, method, request_serializer=None, response_deserializer=None):
+ """grpc.Channel.stream_unary implementation."""
+ return self._stub_for_method(method)
+
+ def stream_stream(
+ self, method, request_serializer=None, response_deserializer=None
+ ):
+ """grpc.Channel.stream_stream implementation."""
+ return self._stub_for_method(method)
+
+ def subscribe(self, callback, try_to_connect=False):
+ """grpc.Channel.subscribe implementation."""
+ pass
+
+ def unsubscribe(self, callback):
+ """grpc.Channel.unsubscribe implementation."""
+ pass
+
+ def close(self):
+ """grpc.Channel.close implementation."""
+ pass
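A self-contained sketch of the stub without a generated client (the method path and payloads are invented; note that a method's stub attribute only exists once the multicallable has been created)::

    from google.api_core import grpc_helpers

    channel = grpc_helpers.ChannelStub()

    # Creating the multicallable registers the "GetFoo" stub on the channel.
    get_foo = channel.unary_unary("/example.v1.FooService/GetFoo")
    channel.GetFoo.response = "fake-response"

    print(get_foo("fake-request"))  # fake-response
    print(channel.requests)
    # [_ChannelRequest(method='GetFoo', request='fake-request')]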
diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py
new file mode 100644
index 0000000..452e787
--- /dev/null
+++ b/google/api_core/grpc_helpers_async.py
@@ -0,0 +1,297 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO helpers for :mod:`grpc` supporting 3.6+.
+
+Please combine more detailed docstring in grpc_helpers.py to use following
+functions. This module is implementing the same surface with AsyncIO semantics.
+"""
+
+import asyncio
+import functools
+
+import grpc
+from grpc import aio
+
+from google.api_core import exceptions, grpc_helpers
+
+
+# TODO(lidiz) Support gRPC GCP wrapper
+HAS_GRPC_GCP = False
+
+# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
+# automatic patching for us. But that means the overhead of creating an
+# extra Python function spreads to every single send and receive.
+
+
+class _WrappedCall(aio.Call):
+ def __init__(self):
+ self._call = None
+
+ def with_call(self, call):
+ """Supplies the call object separately to keep __init__ clean."""
+ self._call = call
+ return self
+
+ async def initial_metadata(self):
+ return await self._call.initial_metadata()
+
+ async def trailing_metadata(self):
+ return await self._call.trailing_metadata()
+
+ async def code(self):
+ return await self._call.code()
+
+ async def details(self):
+ return await self._call.details()
+
+ def cancelled(self):
+ return self._call.cancelled()
+
+ def done(self):
+ return self._call.done()
+
+ def time_remaining(self):
+ return self._call.time_remaining()
+
+ def cancel(self):
+ return self._call.cancel()
+
+ def add_done_callback(self, callback):
+ self._call.add_done_callback(callback)
+
+ async def wait_for_connection(self):
+ try:
+ await self._call.wait_for_connection()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+class _WrappedUnaryResponseMixin(_WrappedCall):
+ def __await__(self):
+ try:
+ response = yield from self._call.__await__()
+ return response
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+class _WrappedStreamResponseMixin(_WrappedCall):
+ def __init__(self):
+ self._wrapped_async_generator = None
+
+ async def read(self):
+ try:
+ return await self._call.read()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ async def _wrapped_aiter(self):
+ try:
+ # NOTE(lidiz) coverage doesn't understand the exception raised from
+ # __anext__ method. It is covered by test case:
+ # test_wrap_stream_errors_aiter_non_rpc_error
+ async for response in self._call: # pragma: no branch
+ yield response
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ def __aiter__(self):
+ if not self._wrapped_async_generator:
+ self._wrapped_async_generator = self._wrapped_aiter()
+ return self._wrapped_async_generator
+
+
+class _WrappedStreamRequestMixin(_WrappedCall):
+ async def write(self, request):
+ try:
+ await self._call.write(request)
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+ async def done_writing(self):
+ try:
+ await self._call.done_writing()
+ except grpc.RpcError as rpc_error:
+ raise exceptions.from_grpc_error(rpc_error) from rpc_error
+
+
+# NOTE(lidiz) Implementing each individual class separately, so we don't
+# expose any API that should not be seen. E.g., __aiter__ in unary-unary
+# RPC, or __await__ in stream-stream RPC.
+class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin, aio.UnaryUnaryCall):
+ """Wrapped UnaryUnaryCall to map exceptions."""
+
+
+class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin, aio.UnaryStreamCall):
+ """Wrapped UnaryStreamCall to map exceptions."""
+
+
+class _WrappedStreamUnaryCall(
+ _WrappedUnaryResponseMixin, _WrappedStreamRequestMixin, aio.StreamUnaryCall
+):
+ """Wrapped StreamUnaryCall to map exceptions."""
+
+
+class _WrappedStreamStreamCall(
+ _WrappedStreamRequestMixin, _WrappedStreamResponseMixin, aio.StreamStreamCall
+):
+ """Wrapped StreamStreamCall to map exceptions."""
+
+
+def _wrap_unary_errors(callable_):
+ """Map errors for Unary-Unary async callables."""
+ grpc_helpers._patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ def error_remapped_callable(*args, **kwargs):
+ call = callable_(*args, **kwargs)
+ return _WrappedUnaryUnaryCall().with_call(call)
+
+ return error_remapped_callable
+
+
+def _wrap_stream_errors(callable_):
+ """Map errors for streaming RPC async callables."""
+ grpc_helpers._patch_callable_name(callable_)
+
+ @functools.wraps(callable_)
+ async def error_remapped_callable(*args, **kwargs):
+ call = callable_(*args, **kwargs)
+
+ if isinstance(call, aio.UnaryStreamCall):
+ call = _WrappedUnaryStreamCall().with_call(call)
+ elif isinstance(call, aio.StreamUnaryCall):
+ call = _WrappedStreamUnaryCall().with_call(call)
+ elif isinstance(call, aio.StreamStreamCall):
+ call = _WrappedStreamStreamCall().with_call(call)
+ else:
+ raise TypeError("Unexpected type of call %s" % type(call))
+
+ await call.wait_for_connection()
+ return call
+
+ return error_remapped_callable
+
+
+def wrap_errors(callable_):
+ """Wrap a gRPC async callable and map :class:`grpc.RpcErrors` to
+ friendly error classes.
+
+ Errors raised by the gRPC callable are mapped to the appropriate
+ :class:`google.api_core.exceptions.GoogleAPICallError` subclasses. The
+ original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ available from the ``response`` property on the mapped exception. This
+ is useful for extracting metadata from the original error.
+
+ Args:
+ callable_ (Callable): A gRPC callable.
+
+    Returns:
+        Callable: The wrapped gRPC callable.
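+
+    Example (a minimal sketch; ``call_my_method`` stands in for any gRPC
+    async callable and is not part of this module):
+
+    .. code-block:: python
+
+        wrapped = wrap_errors(call_my_method)
+        try:
+            # For a unary-unary callable, awaiting the wrapped call yields
+            # the response message directly.
+            response = await wrapped(request)
+        except exceptions.GoogleAPICallError as exc:
+            # The original grpc.RpcError is available as ``exc.response``.
+            print(exc.message)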
+ """
+ if isinstance(callable_, aio.UnaryUnaryMultiCallable):
+ return _wrap_unary_errors(callable_)
+ else:
+ return _wrap_stream_errors(callable_)
+
+
+def create_channel(
+ target,
+ credentials=None,
+ scopes=None,
+ ssl_credentials=None,
+ credentials_file=None,
+ quota_project_id=None,
+ default_scopes=None,
+ default_host=None,
+ **kwargs
+):
+ """Create an AsyncIO secure channel with credentials.
+
+ Args:
+ target (str): The target service address in the format 'hostname:port'.
+ credentials (google.auth.credentials.Credentials): The credentials. If
+ not specified, then this function will attempt to ascertain the
+ credentials from the environment using :func:`google.auth.default`.
+        scopes (Sequence[str]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
+ credentials. This can be used to specify different certificates.
+ credentials_file (str): A file with credentials that can be loaded with
+ :func:`google.auth.load_credentials_from_file`. This argument is
+ mutually exclusive with credentials.
+ quota_project_id (str): An optional project to use for billing and quota.
+ default_scopes (Sequence[str]): Default scopes passed by a Google client
+ library. Use 'scopes' for user-defined scopes.
+ default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+        kwargs: Additional keyword args passed to :func:`aio.secure_channel`.
+
+ Returns:
+ aio.Channel: The created channel.
+
+ Raises:
+ google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
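+
+    Example (a minimal sketch; the target and scope below are illustrative):
+
+    .. code-block:: python
+
+        channel = create_channel(
+            "pubsub.googleapis.com:443",
+            scopes=["https://www.googleapis.com/auth/pubsub"],
+        )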
+ """
+
+ composite_credentials = grpc_helpers._create_composite_credentials(
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ default_scopes=default_scopes,
+ ssl_credentials=ssl_credentials,
+ quota_project_id=quota_project_id,
+ default_host=default_host,
+ )
+
+ return aio.secure_channel(target, composite_credentials, **kwargs)
+
+
+class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall):
+ """Fake implementation for unary-unary RPCs.
+
+    It is a dummy object standing in for the response message. Supply the
+    intended response at initialization, and awaiting the call returns that
+    exact response message.
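+
+    Example (a minimal sketch for stubbing a unary-unary RPC in tests;
+    ``my_response`` is an assumed placeholder, and the assertion must run
+    inside a coroutine):
+
+    .. code-block:: python
+
+        call = FakeUnaryUnaryCall(my_response)
+        assert (await call) is my_response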
+ """
+
+ def __init__(self, response=object()):
+ self.response = response
+ self._future = asyncio.get_event_loop().create_future()
+ self._future.set_result(self.response)
+
+ def __await__(self):
+ response = yield from self._future.__await__()
+ return response
+
+
+class FakeStreamUnaryCall(_WrappedStreamUnaryCall):
+ """Fake implementation for stream-unary RPCs.
+
+    It is a dummy object standing in for the response message. Supply the
+    intended response at initialization, and awaiting the call returns that
+    exact response message.
+ """
+
+ def __init__(self, response=object()):
+ self.response = response
+ self._future = asyncio.get_event_loop().create_future()
+ self._future.set_result(self.response)
+
+ def __await__(self):
+ response = yield from self._future.__await__()
+ return response
+
+ async def wait_for_connection(self):
+ pass
diff --git a/google/api_core/iam.py b/google/api_core/iam.py
new file mode 100644
index 0000000..4437c70
--- /dev/null
+++ b/google/api_core/iam.py
@@ -0,0 +1,427 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Non-API-specific IAM policy definitions
+
+For allowed roles / permissions, see:
+https://cloud.google.com/iam/docs/understanding-roles
+
+Example usage:
+
+.. code-block:: python
+
+    # ``get_iam_policy`` returns a :class:`~google.api_core.iam.Policy`.
+ policy = resource.get_iam_policy(requested_policy_version=3)
+
+ phred = "user:phred@example.com"
+ admin_group = "group:admins@groups.example.com"
+ account = "serviceAccount:account-1234@accounts.example.com"
+
+ policy.version = 3
+ policy.bindings = [
+ {
+ "role": "roles/owner",
+ "members": {phred, admin_group, account}
+ },
+ {
+ "role": "roles/editor",
+ "members": {"allAuthenticatedUsers"}
+ },
+ {
+ "role": "roles/viewer",
+ "members": {"allUsers"}
+ "condition": {
+ "title": "request_time",
+ "description": "Requests made before 2021-01-01T00:00:00Z",
+ "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
+ }
+ }
+ ]
+
+ resource.set_iam_policy(policy)
+"""
+
+import collections
+import collections.abc
+import operator
+import warnings
+
+# Generic IAM roles
+
+OWNER_ROLE = "roles/owner"
+"""Generic role implying all rights to an object."""
+
+EDITOR_ROLE = "roles/editor"
+"""Generic role implying rights to modify an object."""
+
+VIEWER_ROLE = "roles/viewer"
+"""Generic role implying rights to access an object."""
+
+_ASSIGNMENT_DEPRECATED_MSG = """\
+Assigning to '{}' is deprecated. Use the `policy.bindings` property to modify bindings instead."""
+
+_DICT_ACCESS_MSG = """\
+Dict access is not supported on policies with version > 1 or with conditional bindings."""
+
+
+class InvalidOperationException(Exception):
+ """Raised when trying to use Policy class as a dict."""
+
+ pass
+
+
+class Policy(collections.abc.MutableMapping):
+ """IAM Policy
+
+ Args:
+        etag (Optional[str]): ETag used to identify a unique revision of the policy.
+ version (Optional[int]): The syntax schema version of the policy.
+
+ Note:
+ Using conditions in bindings requires the policy's version to be set
+ to `3` or greater, depending on the versions that are currently supported.
+
+        Accessing the policy using dict operations will raise InvalidOperationException
+        when the policy's version is greater than 1 or the policy contains
+        conditional bindings.
+
+ Use the policy.bindings getter/setter to retrieve and modify the policy's bindings.
+
+ See:
+ IAM Policy https://cloud.google.com/iam/reference/rest/v1/Policy
+ Policy versions https://cloud.google.com/iam/docs/policies#versions
+ Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
+ """
+
+ _OWNER_ROLES = (OWNER_ROLE,)
+ """Roles mapped onto our ``owners`` attribute."""
+
+ _EDITOR_ROLES = (EDITOR_ROLE,)
+ """Roles mapped onto our ``editors`` attribute."""
+
+ _VIEWER_ROLES = (VIEWER_ROLE,)
+ """Roles mapped onto our ``viewers`` attribute."""
+
+ def __init__(self, etag=None, version=None):
+ self.etag = etag
+ self.version = version
+ self._bindings = []
+
+ def __iter__(self):
+ self.__check_version__()
+ # Exclude bindings with no members
+ return (binding["role"] for binding in self._bindings if binding["members"])
+
+ def __len__(self):
+ self.__check_version__()
+ # Exclude bindings with no members
+ return len(list(self.__iter__()))
+
+ def __getitem__(self, key):
+ self.__check_version__()
+ for b in self._bindings:
+ if b["role"] == key:
+ return b["members"]
+ # If the binding does not yet exist, create one
+ # NOTE: This will create bindings with no members
+ # which are ignored by __iter__ and __len__
+ new_binding = {"role": key, "members": set()}
+ self._bindings.append(new_binding)
+ return new_binding["members"]
+
+ def __setitem__(self, key, value):
+ self.__check_version__()
+ value = set(value)
+ for binding in self._bindings:
+ if binding["role"] == key:
+ binding["members"] = value
+ return
+ self._bindings.append({"role": key, "members": value})
+
+ def __delitem__(self, key):
+ self.__check_version__()
+ for b in self._bindings:
+ if b["role"] == key:
+ self._bindings.remove(b)
+ return
+ raise KeyError(key)
+
+ def __check_version__(self):
+ """Raise InvalidOperationException if version is greater than 1 or policy contains conditions."""
+ raise_version = self.version is not None and self.version > 1
+
+ if raise_version or self._contains_conditions():
+ raise InvalidOperationException(_DICT_ACCESS_MSG)
+
+ def _contains_conditions(self):
+ for b in self._bindings:
+ if b.get("condition") is not None:
+ return True
+ return False
+
+ @property
+ def bindings(self):
+ """The policy's list of bindings.
+
+ A binding is specified by a dictionary with keys:
+
+ * role (str): Role that is assigned to `members`.
+
+ * members (:obj:`set` of str): Specifies the identities associated to this binding.
+
+ * condition (:obj:`dict` of str:str): Specifies a condition under which this binding will apply.
+
+ * title (str): Title for the condition.
+
+        * description (:obj:`str`, optional): Description of the condition.
+
+ * expression: A CEL expression.
+
+ Type:
+ :obj:`list` of :obj:`dict`
+
+ See:
+ Policy versions https://cloud.google.com/iam/docs/policies#versions
+ Conditions overview https://cloud.google.com/iam/docs/conditions-overview.
+
+ Example:
+
+ .. code-block:: python
+
+ USER = "user:phred@example.com"
+ ADMIN_GROUP = "group:admins@groups.example.com"
+ SERVICE_ACCOUNT = "serviceAccount:account-1234@accounts.example.com"
+ CONDITION = {
+ "title": "request_time",
+ "description": "Requests made before 2021-01-01T00:00:00Z", # Optional
+ "expression": "request.time < timestamp(\"2021-01-01T00:00:00Z\")"
+ }
+
+ # Set policy's version to 3 before setting bindings containing conditions.
+ policy.version = 3
+
+ policy.bindings = [
+ {
+ "role": "roles/viewer",
+ "members": {USER, ADMIN_GROUP, SERVICE_ACCOUNT},
+ "condition": CONDITION
+ },
+ ...
+ ]
+ """
+ return self._bindings
+
+ @bindings.setter
+ def bindings(self, bindings):
+ self._bindings = bindings
+
+ @property
+ def owners(self):
+ """Legacy access to owner role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ result = set()
+ for role in self._OWNER_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @owners.setter
+ def owners(self, value):
+ """Update owners.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("owners", OWNER_ROLE), DeprecationWarning
+ )
+ self[OWNER_ROLE] = value
+
+ @property
+ def editors(self):
+ """Legacy access to editor role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to access bindings instead.
+ """
+ result = set()
+ for role in self._EDITOR_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @editors.setter
+ def editors(self, value):
+ """Update editors.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("editors", EDITOR_ROLE),
+ DeprecationWarning,
+ )
+ self[EDITOR_ROLE] = value
+
+ @property
+ def viewers(self):
+ """Legacy access to viewer role.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ result = set()
+ for role in self._VIEWER_ROLES:
+ for member in self.get(role, ()):
+ result.add(member)
+ return frozenset(result)
+
+ @viewers.setter
+ def viewers(self, value):
+ """Update viewers.
+
+ Raise InvalidOperationException if version is greater than 1 or policy contains conditions.
+
+ DEPRECATED: use `policy.bindings` to modify bindings instead.
+ """
+ warnings.warn(
+ _ASSIGNMENT_DEPRECATED_MSG.format("viewers", VIEWER_ROLE),
+ DeprecationWarning,
+ )
+ self[VIEWER_ROLE] = value
+
+ @staticmethod
+ def user(email):
+ """Factory method for a user member.
+
+ Args:
+ email (str): E-mail for this particular user.
+
+ Returns:
+ str: A member string corresponding to the given user.
+ """
+ return "user:%s" % (email,)
+
+ @staticmethod
+ def service_account(email):
+ """Factory method for a service account member.
+
+ Args:
+ email (str): E-mail for this particular service account.
+
+ Returns:
+ str: A member string corresponding to the given service account.
+
+ """
+ return "serviceAccount:%s" % (email,)
+
+ @staticmethod
+ def group(email):
+ """Factory method for a group member.
+
+ Args:
+ email (str): An id or e-mail for this particular group.
+
+ Returns:
+ str: A member string corresponding to the given group.
+ """
+ return "group:%s" % (email,)
+
+ @staticmethod
+ def domain(domain):
+ """Factory method for a domain member.
+
+ Args:
+ domain (str): The domain for this member.
+
+ Returns:
+ str: A member string corresponding to the given domain.
+ """
+ return "domain:%s" % (domain,)
+
+ @staticmethod
+ def all_users():
+ """Factory method for a member representing all users.
+
+ Returns:
+ str: A member string representing all users.
+ """
+ return "allUsers"
+
+ @staticmethod
+ def authenticated_users():
+ """Factory method for a member representing all authenticated users.
+
+ Returns:
+ str: A member string representing all authenticated users.
+ """
+ return "allAuthenticatedUsers"
+
+ @classmethod
+ def from_api_repr(cls, resource):
+ """Factory: create a policy from a JSON resource.
+
+ Args:
+ resource (dict): policy resource returned by ``getIamPolicy`` API.
+
+ Returns:
+ :class:`Policy`: the parsed policy
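+
+        Example (a minimal sketch; the resource payload is illustrative):
+
+        .. code-block:: python
+
+            resource = {
+                "etag": "BwWWja0YfJA=",
+                "version": 1,
+                "bindings": [{"role": "roles/viewer", "members": ["allUsers"]}],
+            }
+            policy = Policy.from_api_repr(resource)
+            # Member lists are converted to sets on parsing.
+            assert policy.bindings[0]["members"] == {"allUsers"}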
+ """
+ version = resource.get("version")
+ etag = resource.get("etag")
+ policy = cls(etag, version)
+ policy.bindings = resource.get("bindings", [])
+
+ for binding in policy.bindings:
+ binding["members"] = set(binding.get("members", ()))
+
+ return policy
+
+ def to_api_repr(self):
+ """Render a JSON policy resource.
+
+ Returns:
+ dict: a resource to be passed to the ``setIamPolicy`` API.
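+
+        Example (a minimal sketch; note that members are rendered as a
+        sorted list):
+
+        .. code-block:: python
+
+            policy = Policy(etag="BwWWja0YfJA=", version=1)
+            policy.bindings = [{"role": "roles/viewer", "members": {"allUsers"}}]
+            resource = policy.to_api_repr()
+            # resource == {
+            #     "etag": "BwWWja0YfJA=",
+            #     "version": 1,
+            #     "bindings": [{"role": "roles/viewer", "members": ["allUsers"]}],
+            # }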
+ """
+ resource = {}
+
+ if self.etag is not None:
+ resource["etag"] = self.etag
+
+ if self.version is not None:
+ resource["version"] = self.version
+
+        if self._bindings:
+ bindings = []
+ for binding in self._bindings:
+ members = binding.get("members")
+ if members:
+ new_binding = {"role": binding["role"], "members": sorted(members)}
+ condition = binding.get("condition")
+ if condition:
+ new_binding["condition"] = condition
+ bindings.append(new_binding)
+
+ if bindings:
+ # Sort bindings by role
+ key = operator.itemgetter("role")
+ resource["bindings"] = sorted(bindings, key=key)
+
+ return resource
diff --git a/google/api_core/operation.py b/google/api_core/operation.py
new file mode 100644
index 0000000..b17f753
--- /dev/null
+++ b/google/api_core/operation.py
@@ -0,0 +1,351 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operation using :meth:`Operation.result`:
+
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+ result = operation.result()
+
+Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+
+ def my_callback(future):
+ result = future.result()
+
+ operation.add_done_callback(my_callback)
+
+"""
+
+import functools
+import threading
+
+from google.api_core import exceptions
+from google.api_core import protobuf_helpers
+from google.api_core.future import polling
+from google.longrunning import operations_pb2
+from google.protobuf import json_format
+from google.rpc import code_pb2
+
+
+class Operation(polling.PollingFuture):
+ """A Future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The
+ initial operation.
+ refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel
+ the operation.
+        result_type (:func:`type`): The protobuf type for the operation's
+            result.
+        metadata_type (:func:`type`): The protobuf type for the operation's
+            metadata.
+        retry (google.api_core.retry.Retry): The retry configuration used
+            when polling. This can be used to control how often :meth:`done`
+            is polled. Note that the retry's ``deadline`` is overridden by
+            the ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(
+ self,
+ operation,
+ refresh,
+ cancel,
+ result_type,
+ metadata_type=None,
+ retry=polling.DEFAULT_RETRY,
+ ):
+ super(Operation, self).__init__(retry=retry)
+ self._operation = operation
+ self._refresh = refresh
+ self._cancel = cancel
+ self._result_type = result_type
+ self._metadata_type = metadata_type
+ self._completion_lock = threading.Lock()
+ # Invoke this in case the operation came back already complete.
+ self._set_result_from_operation()
+
+ @property
+ def operation(self):
+ """google.longrunning.Operation: The current long-running operation."""
+ return self._operation
+
+ @property
+ def metadata(self):
+ """google.protobuf.Message: the current operation metadata."""
+ if not self._operation.HasField("metadata"):
+ return None
+
+ return protobuf_helpers.from_any_pb(
+ self._metadata_type, self._operation.metadata
+ )
+
+ @classmethod
+    def deserialize(cls, payload):
+ """Deserialize a ``google.longrunning.Operation`` protocol buffer.
+
+ Args:
+ payload (bytes): A serialized operation protocol buffer.
+
+ Returns:
+ ~.operations_pb2.Operation: An Operation protobuf object.
+ """
+ return operations_pb2.Operation.FromString(payload)
+
+ def _set_result_from_operation(self):
+ """Set the result or exception from the operation if it is complete."""
+ # This must be done in a lock to prevent the polling thread
+ # and main thread from both executing the completion logic
+ # at the same time.
+ with self._completion_lock:
+ # If the operation isn't complete or if the result has already been
+ # set, do not call set_result/set_exception again.
+ # Note: self._result_set is set to True in set_result and
+ # set_exception, in case those methods are invoked directly.
+ if not self._operation.done or self._result_set:
+ return
+
+ if self._operation.HasField("response"):
+ response = protobuf_helpers.from_any_pb(
+ self._result_type, self._operation.response
+ )
+ self.set_result(response)
+ elif self._operation.HasField("error"):
+ exception = exceptions.from_grpc_status(
+ status_code=self._operation.error.code,
+ message=self._operation.error.message,
+ errors=(self._operation.error,),
+ response=self._operation,
+ )
+ self.set_exception(exception)
+ else:
+ exception = exceptions.GoogleAPICallError(
+ "Unexpected state: Long-running operation had neither "
+ "response nor error set."
+ )
+ self.set_exception(exception)
+
+ def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
+ """Refresh the operation and update the result if needed.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ """
+ # If the currently cached operation is done, no need to make another
+ # RPC as it will not change once done.
+ if not self._operation.done:
+ self._operation = self._refresh(retry=retry)
+ self._set_result_from_operation()
+
+ def done(self, retry=polling.DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ self._refresh_and_update(retry)
+ return self._operation.done
+
+ def cancel(self):
+ """Attempt to cancel the operation.
+
+ Returns:
+ bool: True if the cancel RPC was made, False if the operation is
+ already complete.
+ """
+ if self.done():
+ return False
+
+ self._cancel()
+ return True
+
+ def cancelled(self):
+ """True if the operation was cancelled."""
+ self._refresh_and_update()
+ return (
+ self._operation.HasField("error")
+ and self._operation.error.code == code_pb2.CANCELLED
+ )
+
+
+def _refresh_http(api_request, operation_name, retry=None):
+ """Refresh an operation using a JSON/HTTP client.
+
+ Args:
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ operation_name (str): The name of the operation.
+ retry (google.api_core.retry.Retry): (Optional) retry policy
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The operation.
+ """
+ path = "operations/{}".format(operation_name)
+
+ if retry is not None:
+ api_request = retry(api_request)
+
+ api_response = api_request(method="GET", path=path)
+ return json_format.ParseDict(api_response, operations_pb2.Operation())
+
+
+def _cancel_http(api_request, operation_name):
+ """Cancel an operation using a JSON/HTTP client.
+
+ Args:
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ operation_name (str): The name of the operation.
+ """
+ path = "operations/{}:cancel".format(operation_name)
+ api_request(method="POST", path=path)
+
+
+def from_http_json(operation, api_request, result_type, **kwargs):
+ """Create an operation future using a HTTP/JSON client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via `HTTP/JSON`_.
+
+    .. _service: https://github.com/googleapis/googleapis/blob/\
+        050400df0fdb16f63b63e9dee53819044bffc857/\
+        google/longrunning/operations.proto#L38
+    .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\
+        v1beta1/operations#Operation
+
+ Args:
+ operation (dict): Operation as a dictionary.
+ api_request (Callable): A callable used to make an API request. This
+ should generally be
+ :meth:`google.cloud._http.Connection.api_request`.
+ result_type (:func:`type`): The protobuf result type.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ operation_proto = json_format.ParseDict(operation, operations_pb2.Operation())
+ refresh = functools.partial(_refresh_http, api_request, operation_proto.name)
+ cancel = functools.partial(_cancel_http, api_request, operation_proto.name)
+ return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
+
+
+def _refresh_grpc(operations_stub, operation_name, retry=None, metadata=None):
+ """Refresh an operation using a gRPC client.
+
+ Args:
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The gRPC operations stub.
+ operation_name (str): The name of the operation.
+        retry (google.api_core.retry.Retry): (Optional) retry policy
+        metadata (Optional[List[Tuple[str, str]]]): (Optional) additional
+            metadata to pass to the rpc.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The operation.
+ """
+ request_pb = operations_pb2.GetOperationRequest(name=operation_name)
+
+ rpc = operations_stub.GetOperation
+ if retry is not None:
+ rpc = retry(rpc)
+
+    return rpc(request_pb, metadata=metadata)
+
+
+def _cancel_grpc(operations_stub, operation_name, metadata=None):
+ """Cancel an operation using a gRPC client.
+
+ Args:
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The gRPC operations stub.
+        operation_name (str): The name of the operation.
+        metadata (Optional[List[Tuple[str, str]]]): (Optional) additional
+            metadata to pass to the rpc.
+ """
+ request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
+    operations_stub.CancelOperation(request_pb, metadata=metadata)
+
+
+def from_grpc(operation, operations_stub, result_type, grpc_metadata=None, **kwargs):
+ """Create an operation future using a gRPC client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via gRPC.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_stub (google.longrunning.operations_pb2.OperationsStub):
+ The operations stub.
+ result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
+ """
+ refresh = functools.partial(
+ _refresh_grpc, operations_stub, operation.name, metadata=grpc_metadata
+ )
+ cancel = functools.partial(
+ _cancel_grpc, operations_stub, operation.name, metadata=grpc_metadata
+ )
+ return Operation(operation, refresh, cancel, result_type, **kwargs)
+
+
+def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
+ """Create an operation future from a gapic client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via a gapic client.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_client (google.api_core.operations_v1.OperationsClient):
+ The operations client.
+ result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
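+
+    Example (a minimal sketch; ``operations_client`` and ``my_pb2`` are
+    assumed to exist in the caller's context):
+
+    .. code-block:: python
+
+        future = from_gapic(
+            operation,
+            operations_client,
+            my_pb2.MyResult,
+            metadata_type=my_pb2.MyMetadata,
+        )
+        result = future.result()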
+ """
+ refresh = functools.partial(
+ operations_client.get_operation, operation.name, metadata=grpc_metadata
+ )
+ cancel = functools.partial(
+ operations_client.cancel_operation, operation.name, metadata=grpc_metadata
+ )
+ return Operation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operation_async.py b/google/api_core/operation_async.py
new file mode 100644
index 0000000..6bae865
--- /dev/null
+++ b/google/api_core/operation_async.py
@@ -0,0 +1,221 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO futures for long-running operations returned from Google Cloud APIs.
+
+These futures can be used to await the result of a long-running operation
+using :meth:`AsyncOperation.result`:
+
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+ result = await operation.result()
+
+Or asynchronously using callbacks and :meth:`AsyncOperation.add_done_callback`:
+
+.. code-block:: python
+
+ operation = my_api_client.long_running_method()
+
+    async def my_callback(future):
+ result = await future.result()
+
+ operation.add_done_callback(my_callback)
+
+"""
+
+import functools
+import threading
+
+from google.api_core import exceptions
+from google.api_core import protobuf_helpers
+from google.api_core.future import async_future
+from google.longrunning import operations_pb2
+from google.rpc import code_pb2
+
+
+class AsyncOperation(async_future.AsyncFuture):
+ """A Future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The
+ initial operation.
+ refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel
+ the operation.
+        result_type (:func:`type`): The protobuf type for the operation's
+            result.
+        metadata_type (:func:`type`): The protobuf type for the operation's
+            metadata.
+        retry (google.api_core.retry.Retry): The retry configuration used
+            when polling. This can be used to control how often :meth:`done`
+            is polled. Note that the retry's ``deadline`` is overridden by
+            the ``timeout`` argument to :meth:`result`.
+ """
+
+ def __init__(
+ self,
+ operation,
+ refresh,
+ cancel,
+ result_type,
+ metadata_type=None,
+ retry=async_future.DEFAULT_RETRY,
+ ):
+ super().__init__(retry=retry)
+ self._operation = operation
+ self._refresh = refresh
+ self._cancel = cancel
+ self._result_type = result_type
+ self._metadata_type = metadata_type
+ self._completion_lock = threading.Lock()
+ # Invoke this in case the operation came back already complete.
+ self._set_result_from_operation()
+
+ @property
+ def operation(self):
+ """google.longrunning.Operation: The current long-running operation."""
+ return self._operation
+
+ @property
+ def metadata(self):
+ """google.protobuf.Message: the current operation metadata."""
+ if not self._operation.HasField("metadata"):
+ return None
+
+ return protobuf_helpers.from_any_pb(
+ self._metadata_type, self._operation.metadata
+ )
+
+ @classmethod
+ def deserialize(cls, payload):
+ """Deserialize a ``google.longrunning.Operation`` protocol buffer.
+
+ Args:
+ payload (bytes): A serialized operation protocol buffer.
+
+ Returns:
+ ~.operations_pb2.Operation: An Operation protobuf object.
+ """
+ return operations_pb2.Operation.FromString(payload)
+
+ def _set_result_from_operation(self):
+ """Set the result or exception from the operation if it is complete."""
+ # This must be done in a lock to prevent the async_future thread
+ # and main thread from both executing the completion logic
+ # at the same time.
+ with self._completion_lock:
+ # If the operation isn't complete or if the result has already been
+ # set, do not call set_result/set_exception again.
+ if not self._operation.done or self._future.done():
+ return
+
+ if self._operation.HasField("response"):
+ response = protobuf_helpers.from_any_pb(
+ self._result_type, self._operation.response
+ )
+ self.set_result(response)
+ elif self._operation.HasField("error"):
+ exception = exceptions.GoogleAPICallError(
+ self._operation.error.message,
+ errors=(self._operation.error,),
+ response=self._operation,
+ )
+ self.set_exception(exception)
+ else:
+ exception = exceptions.GoogleAPICallError(
+ "Unexpected state: Long-running operation had neither "
+ "response nor error set."
+ )
+ self.set_exception(exception)
+
+ async def _refresh_and_update(self, retry=async_future.DEFAULT_RETRY):
+ """Refresh the operation and update the result if needed.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ """
+ # If the currently cached operation is done, no need to make another
+ # RPC as it will not change once done.
+ if not self._operation.done:
+ self._operation = await self._refresh(retry=retry)
+ self._set_result_from_operation()
+
+ async def done(self, retry=async_future.DEFAULT_RETRY):
+ """Checks to see if the operation is complete.
+
+ Args:
+ retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+
+ Returns:
+ bool: True if the operation is complete, False otherwise.
+ """
+ await self._refresh_and_update(retry)
+ return self._operation.done
+
+ async def cancel(self):
+ """Attempt to cancel the operation.
+
+ Returns:
+ bool: True if the cancel RPC was made, False if the operation is
+ already complete.
+ """
+ result = await self.done()
+ if result:
+ return False
+ else:
+ await self._cancel()
+ return True
+
+ async def cancelled(self):
+ """True if the operation was cancelled."""
+ await self._refresh_and_update()
+ return (
+ self._operation.HasField("error")
+ and self._operation.error.code == code_pb2.CANCELLED
+ )
+
+
+def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
+ """Create an operation future from a gapic client.
+
+ This interacts with the long-running operations `service`_ (specific
+ to a given API) via a gapic client.
+
+ .. _service: https://github.com/googleapis/googleapis/blob/\
+ 050400df0fdb16f63b63e9dee53819044bffc857/\
+ google/longrunning/operations.proto#L38
+
+ Args:
+ operation (google.longrunning.operations_pb2.Operation): The operation.
+ operations_client (google.api_core.operations_v1.OperationsClient):
+ The operations client.
+ result_type (:func:`type`): The protobuf result type.
+ grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
+ to the rpc.
+ kwargs: Keyword args passed into the :class:`Operation` constructor.
+
+ Returns:
+ ~.api_core.operation.Operation: The operation future to track the given
+ operation.
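+
+    Example (a minimal sketch; names are assumed, and the code must run
+    inside a coroutine):
+
+    .. code-block:: python
+
+        future = from_gapic(operation, operations_client, my_pb2.MyResult)
+        result = await future.result()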
+ """
+ refresh = functools.partial(
+ operations_client.get_operation, operation.name, metadata=grpc_metadata
+ )
+ cancel = functools.partial(
+ operations_client.cancel_operation, operation.name, metadata=grpc_metadata
+ )
+ return AsyncOperation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
new file mode 100644
index 0000000..6118645
--- /dev/null
+++ b/google/api_core/operations_v1/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Package for interacting with the google.longrunning.operations meta-API."""
+
+from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient
+from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient
+from google.api_core.operations_v1.operations_client import OperationsClient
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+__all__ = [
+ "AbstractOperationsClient",
+ "OperationsAsyncClient",
+ "OperationsClient",
+ "OperationsRestTransport"
+]
diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py
new file mode 100644
index 0000000..631094e
--- /dev/null
+++ b/google/api_core/operations_v1/abstract_operations_client.py
@@ -0,0 +1,564 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+
+OptionalRetry = Union[retries.Retry, object]
+
+
+class AbstractOperationsClientMeta(type):
+ """Metaclass for the Operations client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+ _transport_registry["rest"] = OperationsRestTransport
+
+ def get_transport_class(
+ cls, label: Optional[str] = None,
+ ) -> Type[OperationsTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta):
+ """Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
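+
+        Example (illustrative conversions implied by the regex below):
+
+        .. code-block:: python
+
+            _get_default_mtls_endpoint("longrunning.googleapis.com")
+            # -> "longrunning.mtls.googleapis.com"
+            _get_default_mtls_endpoint("foo.sandbox.googleapis.com")
+            # -> "foo.mtls.sandbox.googleapis.com"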
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OperationsTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OperationsTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, OperationsTransport):
+ # transport is a OperationsTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
+
+ def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+ default name includes the operations collection id, however
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+            filter_ (str):
+                The standard list filter. This corresponds to the
+                ``filter`` field of the underlying
+                ``ListOperationsRequest``.
+            page_size (int):
+                The maximum number of operations to return per page.
+            page_token (str):
+                The token for the page to retrieve, as returned by a
+                previous response.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operations_v1.pagers.ListOperationsPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
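+        Example (a minimal sketch; the parent resource name and filter are
+        illustrative):
+
+        .. code-block:: python
+
+            pager = client.list_operations("operations", filter_="done:true")
+            for operation in pager:
+                print(operation.name)
+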
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListOperationsPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a
+                network API call.
+
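+        Example (a minimal sketch; the operation name is illustrative):
+
+        .. code-block:: python
+
+            op = client.get_operation("operations/sample-operation-id")
+            if op.done:
+                print("operation finished")
+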
+ """
+
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def delete_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+                This corresponds to the ``name`` field of the
+                underlying ``DeleteOperationRequest``.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+                This corresponds to the ``name`` field of the
+                underlying ``CancelOperationRequest``.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
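Note: each method above rebuilds the same routing-header tuple inline. A minimal standalone sketch of that pattern, assuming only the public ``gapic_v1.routing_header`` helper (the ``build_metadata`` name is illustrative, not part of the library):

    from typing import Optional, Sequence, Tuple

    from google.api_core import gapic_v1

    def build_metadata(
        name: str, metadata: Optional[Sequence[Tuple[str, str]]] = None
    ) -> Tuple[Tuple[str, str], ...]:
        # Preserve caller-supplied metadata, then append the routing header
        # derived from the request's ``name`` field, as the methods above do.
        return tuple(metadata or ()) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", name),)),
        )

    # build_metadata("operations/123") should yield something like
    # (("x-goog-request-params", "name=operations%2F123"),)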
diff --git a/google/api_core/operations_v1/operations_async_client.py b/google/api_core/operations_v1/operations_async_client.py
new file mode 100644
index 0000000..5a5e556
--- /dev/null
+++ b/google/api_core/operations_v1/operations_async_client.py
@@ -0,0 +1,322 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An async client for the google.longrunning.operations meta-API.
+
+.. _Google API Style Guide:
+    https://cloud.google.com/apis/design/design_patterns#long_running_operations
+.. _google/longrunning/operations.proto:
+    https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto
+"""
+
+import functools
+
+from google.api_core import gapic_v1, page_iterator_async
+from google.api_core.operations_v1 import operations_client_config
+from google.longrunning import operations_pb2
+
+
+class OperationsAsyncClient:
+ """Async client for interacting with long-running operations.
+
+ Args:
+ channel (aio.Channel): The gRPC AsyncIO channel associated with the
+ service that implements the ``google.longrunning.operations``
+ interface.
+ client_config (dict):
+ A dictionary of call options for each method. If not specified
+ the default configuration is used.
+ """
+
+ def __init__(self, channel, client_config=operations_client_config.config):
+ # Create the gRPC client stub with gRPC AsyncIO channel.
+ self.operations_stub = operations_pb2.OperationsStub(channel)
+
+ # Create all wrapped methods using the interface configuration.
+ # The interface config contains all of the default settings for retry
+ # and timeout for each RPC method.
+ interfaces = client_config["interfaces"]
+ interface_config = interfaces["google.longrunning.Operations"]
+ method_configs = gapic_v1.config_async.parse_method_configs(interface_config)
+
+ self._get_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.GetOperation,
+ default_retry=method_configs["GetOperation"].retry,
+ default_timeout=method_configs["GetOperation"].timeout,
+ )
+
+ self._list_operations = gapic_v1.method_async.wrap_method(
+ self.operations_stub.ListOperations,
+ default_retry=method_configs["ListOperations"].retry,
+ default_timeout=method_configs["ListOperations"].timeout,
+ )
+
+ self._cancel_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.CancelOperation,
+ default_retry=method_configs["CancelOperation"].retry,
+ default_timeout=method_configs["CancelOperation"].timeout,
+ )
+
+ self._delete_operation = gapic_v1.method_async.wrap_method(
+ self.operations_stub.DeleteOperation,
+ default_retry=method_configs["DeleteOperation"].retry,
+ default_timeout=method_configs["DeleteOperation"].timeout,
+ )
+
+ async def get_operation(
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """Gets the latest state of a long-running operation.
+
+ Clients can use this method to poll the operation result at intervals
+ as recommended by the API service.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient()
+ >>> name = ''
+ >>> response = await api.get_operation(name)
+
+ Args:
+ name (str): The name of the operation resource.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]):
+ Additional gRPC metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The state of the
+ operation.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ return await self._get_operation(
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ async def list_operations(
+ self,
+ name,
+ filter_,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists operations that match the specified filter in the request.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient()
+ >>> name = ''
+ >>>
+            >>> # Iterate over all results
+            >>> async for operation in await api.list_operations(name, filter_=''):
+            >>>     # process operation
+            >>>     pass
+            >>>
+            >>> # Or iterate over results one page at a time
+            >>> iter = await api.list_operations(name, filter_='')
+            >>> async for page in iter.pages:
+            >>>     async for operation in page:
+            >>>         # process operation
+            >>>         pass
+
+ Args:
+ name (str): The name of the operation collection.
+ filter_ (str): The standard list filter.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Returns:
+ google.api_core.page_iterator.Iterator: An iterator that yields
+ :class:`google.longrunning.operations_pb2.Operation` instances.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ # Create the method used to fetch pages
+ method = functools.partial(
+ self._list_operations, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ client=None,
+ method=method,
+ request=request,
+ items_field="operations",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+
+ return iterator
+
+ async def cancel_operation(
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success is
+ not guaranteed. Clients can use :meth:`get_operation` or service-
+ specific methods to check whether the cancellation succeeded or whether
+ the operation completed despite cancellation. On successful
+ cancellation, the operation is not deleted; instead, it becomes an
+ operation with an ``Operation.error`` value with a
+ ``google.rpc.Status.code`` of ``1``, corresponding to
+ ``Code.CANCELLED``.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient()
+            >>> name = ''
+            >>> await api.cancel_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be cancelled.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+            metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+                metadata.
+
+        Raises:
+            google.api_core.exceptions.MethodNotImplemented: If the server
+                does not support this method. Services are not required to
+                implement this method.
+            google.api_core.exceptions.GoogleAPICallError: If an error occurred
+                while invoking the RPC, the appropriate ``GoogleAPICallError``
+                subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ await self._cancel_operation(
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ async def delete_operation(
+ self,
+ name,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ metadata=None,
+ ):
+ """Deletes a long-running operation.
+
+ This method indicates that the client is no longer interested in the
+ operation result. It does not cancel the operation.
+
+ Example:
+ >>> from google.api_core import operations_v1
+            >>> api = operations_v1.OperationsAsyncClient()
+            >>> name = ''
+            >>> await api.delete_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be deleted.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ await self._delete_operation(
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
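A minimal usage sketch for the async client above; the channel target and operation name are placeholders, not values from the library:

    import asyncio

    import grpc
    from google.api_core import operations_v1

    async def main():
        # The async client wraps an existing gRPC AsyncIO channel.
        async with grpc.aio.insecure_channel("localhost:50051") as channel:
            client = operations_v1.OperationsAsyncClient(channel)
            operation = await client.get_operation("operations/sample")
            print(operation.name, operation.done)

    asyncio.run(main())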
diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py
new file mode 100644
index 0000000..e48eac0
--- /dev/null
+++ b/google/api_core/operations_v1/operations_client.py
@@ -0,0 +1,332 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A client for the google.longrunning.operations meta-API.
+
+This is a client that deals with long-running operations that follow the
+pattern outlined by the `Google API Style Guide`_.
+
+When an API method normally takes a long time to complete, it can be designed
+to return ``Operation`` to the client, and the client can use this interface
+to receive the real response asynchronously by polling the operation resource.
+
+It is not a separate service, but rather an interface implemented by a larger
+service. The protocol-level definition is available at
+`google/longrunning/operations.proto`_. Typically, this will be constructed
+automatically by another client class to deal with operations.
+
+.. _Google API Style Guide:
+    https://cloud.google.com/apis/design/design_patterns#long_running_operations
+.. _google/longrunning/operations.proto:
+    https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto
+"""
+
+import functools
+
+from google.api_core import gapic_v1
+from google.api_core import page_iterator
+from google.api_core.operations_v1 import operations_client_config
+from google.longrunning import operations_pb2
+
+
+class OperationsClient(object):
+ """Client for interacting with long-running operations within a service.
+
+ Args:
+ channel (grpc.Channel): The gRPC channel associated with the service
+ that implements the ``google.longrunning.operations`` interface.
+ client_config (dict):
+ A dictionary of call options for each method. If not specified
+ the default configuration is used.
+ """
+
+ def __init__(self, channel, client_config=operations_client_config.config):
+ # Create the gRPC client stub.
+ self.operations_stub = operations_pb2.OperationsStub(channel)
+
+ # Create all wrapped methods using the interface configuration.
+ # The interface config contains all of the default settings for retry
+ # and timeout for each RPC method.
+ interfaces = client_config["interfaces"]
+ interface_config = interfaces["google.longrunning.Operations"]
+ method_configs = gapic_v1.config.parse_method_configs(interface_config)
+
+ self._get_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.GetOperation,
+ default_retry=method_configs["GetOperation"].retry,
+ default_timeout=method_configs["GetOperation"].timeout,
+ )
+
+ self._list_operations = gapic_v1.method.wrap_method(
+ self.operations_stub.ListOperations,
+ default_retry=method_configs["ListOperations"].retry,
+ default_timeout=method_configs["ListOperations"].timeout,
+ )
+
+ self._cancel_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.CancelOperation,
+ default_retry=method_configs["CancelOperation"].retry,
+ default_timeout=method_configs["CancelOperation"].timeout,
+ )
+
+ self._delete_operation = gapic_v1.method.wrap_method(
+ self.operations_stub.DeleteOperation,
+ default_retry=method_configs["DeleteOperation"].retry,
+ default_timeout=method_configs["DeleteOperation"].timeout,
+ )
+
+ # Service calls
+ def get_operation(
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """Gets the latest state of a long-running operation.
+
+ Clients can use this method to poll the operation result at intervals
+ as recommended by the API service.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>> response = api.get_operation(name)
+
+ Args:
+ name (str): The name of the operation resource.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]):
+ Additional gRPC metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation: The state of the
+ operation.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ return self._get_operation(
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def list_operations(
+ self,
+ name,
+ filter_,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Lists operations that match the specified filter in the request.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>>
+            >>> # Iterate over all results
+            >>> for operation in api.list_operations(name, filter_=''):
+            >>>     # process operation
+            >>>     pass
+            >>>
+            >>> # Or iterate over results one page at a time
+            >>> iter = api.list_operations(name, filter_='')
+            >>> for page in iter.pages:
+            >>>     for operation in page:
+            >>>         # process operation
+            >>>         pass
+
+ Args:
+ name (str): The name of the operation collection.
+ filter_ (str): The standard list filter.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Returns:
+ google.api_core.page_iterator.Iterator: An iterator that yields
+ :class:`google.longrunning.operations_pb2.Operation` instances.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ # Create the method used to fetch pages
+ method = functools.partial(
+ self._list_operations, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ iterator = page_iterator.GRPCIterator(
+ client=None,
+ method=method,
+ request=request,
+ items_field="operations",
+ request_token_field="page_token",
+ response_token_field="next_page_token",
+ )
+
+ return iterator
+
+ def cancel_operation(
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """Starts asynchronous cancellation on a long-running operation.
+
+ The server makes a best effort to cancel the operation, but success is
+ not guaranteed. Clients can use :meth:`get_operation` or service-
+ specific methods to check whether the cancellation succeeded or whether
+ the operation completed despite cancellation. On successful
+ cancellation, the operation is not deleted; instead, it becomes an
+ operation with an ``Operation.error`` value with a
+ ``google.rpc.Status.code`` of ``1``, corresponding to
+ ``Code.CANCELLED``.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>> api.cancel_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be cancelled.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ self._cancel_operation(request, retry=retry, timeout=timeout, metadata=metadata)
+
+ def delete_operation(
+ self,
+ name,
+ retry=gapic_v1.method.DEFAULT,
+ timeout=gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """Deletes a long-running operation.
+
+ This method indicates that the client is no longer interested in the
+ operation result. It does not cancel the operation.
+
+ Example:
+ >>> from google.api_core import operations_v1
+ >>> api = operations_v1.OperationsClient()
+ >>> name = ''
+ >>> api.delete_operation(name)
+
+ Args:
+ name (str): The name of the operation resource to be deleted.
+ retry (google.api_core.retry.Retry): The retry strategy to use
+ when invoking the RPC. If unspecified, the default retry from
+ the client configuration will be used. If ``None``, then this
+ method will not retry the RPC at all.
+ timeout (float): The amount of time in seconds to wait for the RPC
+ to complete. Note that if ``retry`` is used, this timeout
+ applies to each individual attempt and the overall time it
+ takes for this method to complete may be longer. If
+                unspecified, the default timeout in the client
+ configuration is used. If ``None``, then the RPC method will
+ not time out.
+ metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
+ metadata.
+
+ Raises:
+ google.api_core.exceptions.MethodNotImplemented: If the server
+ does not support this method. Services are not required to
+ implement this method.
+ google.api_core.exceptions.GoogleAPICallError: If an error occurred
+ while invoking the RPC, the appropriate ``GoogleAPICallError``
+ subclass will be raised.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Add routing header
+ metadata = metadata or []
+ metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
+
+ self._delete_operation(request, retry=retry, timeout=timeout, metadata=metadata)
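A comparable sketch for the synchronous client, including transparent pagination; the endpoint and collection name are assumptions:

    import grpc
    from google.api_core import operations_v1

    channel = grpc.insecure_channel("localhost:50051")
    client = operations_v1.OperationsClient(channel)

    # list_operations returns a GRPCIterator that follows next_page_token
    # automatically as the results are consumed.
    for operation in client.list_operations("operations", filter_=""):
        print(operation.name, operation.done)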
diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py
new file mode 100644
index 0000000..6cf9575
--- /dev/null
+++ b/google/api_core/operations_v1/operations_client_config.py
@@ -0,0 +1,59 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""gapic configuration for the googe.longrunning.operations client."""
+
+config = {
+ "interfaces": {
+ "google.longrunning.Operations": {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "non_idempotent": [],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 20000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ }
+ },
+ "methods": {
+ "GetOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "ListOperations": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "CancelOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "DeleteOperation": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ },
+ }
+ }
+}
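This dictionary is consumed by ``gapic_v1.config.parse_method_configs``, which turns each ``retry_params`` block into a ``Retry`` object and each ``timeout_millis`` into a default timeout, as the clients above do. A short sketch:

    from google.api_core import gapic_v1
    from google.api_core.operations_v1 import operations_client_config

    interface = operations_client_config.config["interfaces"][
        "google.longrunning.Operations"
    ]
    method_configs = gapic_v1.config.parse_method_configs(interface)

    # Each entry carries the default retry and timeout derived from
    # "retry_params" and "timeout_millis" above.
    get_config = method_configs["GetOperation"]
    print(get_config.retry, get_config.timeout)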
diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py
new file mode 100644
index 0000000..b8a4775
--- /dev/null
+++ b/google/api_core/operations_v1/pagers.py
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ Callable,
+ Iterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+
+
+class ListOperationsPager:
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.longrunning.operations_pb2.ListOperationsRequest):
+ The initial request object.
+ response (google.longrunning.operations_pb2.ListOperationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = request
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterator[operations_pb2.Operation]:
+ for page in self.pages:
+ yield from page.operations
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
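For illustration, a sketch of wiring the pager to a list method; the transport construction and request values are assumptions (the no-argument constructor resolves application default credentials):

    from google.api_core.operations_v1 import pagers
    from google.api_core.operations_v1.transports import OperationsRestTransport
    from google.longrunning import operations_pb2

    transport = OperationsRestTransport()  # assumes default credentials resolve
    request = operations_pb2.ListOperationsRequest(name="operations")
    first_page = transport.list_operations(request)

    pager = pagers.ListOperationsPager(
        method=transport.list_operations, request=request, response=first_page
    )
    # Iterating re-invokes the method whenever next_page_token is set.
    for operation in pager:
        print(operation.name)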
diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py
new file mode 100644
index 0000000..b443c07
--- /dev/null
+++ b/google/api_core/operations_v1/transports/__init__.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import OperationsTransport
+from .rest import OperationsRestTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+_transport_registry["rest"] = OperationsRestTransport
+
+__all__ = (
+ "OperationsTransport",
+ "OperationsRestTransport",
+)
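The registry lets callers resolve a transport class by name, e.g.:

    from google.api_core.operations_v1 import transports

    transport_cls = transports._transport_registry["rest"]
    assert transport_cls is transports.OperationsRestTransport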
diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py
new file mode 100644
index 0000000..460e646
--- /dev/null
+++ b/google/api_core/operations_v1/transports/base.py
@@ -0,0 +1,232 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Optional, Sequence, Union
+
+import pkg_resources
+
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google.api_core.operations_v1",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class OperationsTransport(abc.ABC):
+ """Abstract transport class for Operations."""
+
+ AUTH_SCOPES = ()
+
+ DEFAULT_HOST: str = "longrunning.googleapis.com"
+
+ def __init__(
+ self,
+ *,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ self.delete_operation: gapic_v1.method.wrap_method(
+ self.delete_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Union[
+ operations_pb2.ListOperationsResponse,
+ Awaitable[operations_pb2.ListOperationsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("OperationsTransport",)
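The wrapped-method defaults above amount to this standalone retry policy: exponential backoff from 0.5s up to 10s between attempts, retrying only ``ServiceUnavailable``, with a 10-second overall deadline:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    DEFAULT_RETRY = retries.Retry(
        initial=0.5,     # first delay, in seconds
        maximum=10.0,    # cap on the delay between attempts
        multiplier=2.0,  # exponential backoff factor
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=10.0,   # give up after 10 seconds overall
    )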
diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py
new file mode 100644
index 0000000..27ed766
--- /dev/null
+++ b/google/api_core/operations_v1/transports/rest.py
@@ -0,0 +1,455 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from requests import __version__ as requests_version
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+OptionalRetry = Union[retries.Retry, object]
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=requests_version,
+)
+
+
+class OperationsRestTransport(OperationsTransport):
+ """REST backend transport for Operations.
+
+ Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+                the defaults from operations.proto. Each method has an entry
+                with the corresponding HTTP rules as the value.
+
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+
+ def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {"method": "get", "uri": "/v1/{name=operations}"},
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+                This resource represents a long-running
+                operation that is the result of a
+                network API call.
+
+ """
+
+ http_options = [
+ {"method": "get", "uri": "/v1/{name=operations/**}"},
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {"method": "delete", "uri": "/v1/{name=operations/**}"},
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {"method": "post", "uri": "/v1/{name=operations/**}:cancel", "body": "*"},
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[[operations_pb2.DeleteOperationRequest], empty_pb2.Empty]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[[operations_pb2.CancelOperationRequest], empty_pb2.Empty]:
+ return self._cancel_operation
+
+
+__all__ = ("OperationsRestTransport",)
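Because each REST method consults ``self._http_options`` before falling back to its built-in rule, the transcoding table can be overridden at construction time. A sketch; the host and the v2 URI template are hypothetical:

    from google.api_core.operations_v1.transports import OperationsRestTransport

    transport = OperationsRestTransport(
        host="example.googleapis.com",
        http_options={
            "google.longrunning.Operations.GetOperation": [
                {"method": "get", "uri": "/v2/{name=operations/**}"},
            ],
        },
    )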
diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py
new file mode 100644
index 0000000..7ddc5cb
--- /dev/null
+++ b/google/api_core/page_iterator.py
@@ -0,0 +1,571 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Iterators for paging through paged API methods.
+
+These iterators simplify the process of paging through API responses
+where the request takes a page token and the response is a list of results with
+a token for the next page. See `list pagination`_ in the Google API Style Guide
+for more details.
+
+.. _list pagination:
+ https://cloud.google.com/apis/design/design_patterns#list_pagination
+
+API clients that have methods that follow the list pagination pattern can
+return an :class:`.Iterator`. You can use this iterator to get **all** of
+the results across all pages::
+
+ >>> results_iterator = client.list_resources()
+ >>> list(results_iterator) # Convert to a list (consumes all values).
+
+Or you can walk your way through items and call off the search early if
+you find what you're looking for (resulting in possibly fewer requests)::
+
+ >>> for resource in results_iterator:
+ ... print(resource.name)
+ ... if not resource.is_valid:
+ ... break
+
+At any point, you may check the number of items consumed by referencing the
+``num_results`` property of the iterator::
+
+ >>> for my_item in results_iterator:
+ ... if results_iterator.num_results >= 10:
+ ... break
+
+When iterating, not every new item will send a request to the server.
+To iterate based on each page of items (where a page corresponds to
+a request)::
+
+ >>> for page in results_iterator.pages:
+ ... print('=' * 20)
+    ...     print('   Page number: {:d}'.format(results_iterator.page_number))
+ ... print(' Items in page: {:d}'.format(page.num_items))
+ ... print(' First item: {!r}'.format(next(page)))
+ ... print('Items remaining: {:d}'.format(page.remaining))
+    ...     print('Next page token: {}'.format(results_iterator.next_page_token))
+ ====================
+ Page number: 1
+ Items in page: 1
+ First item: <MyItemClass at 0x7f1d3cccf690>
+ Items remaining: 0
+ Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
+ ====================
+ Page number: 2
+ Items in page: 19
+ First item: <MyItemClass at 0x7f1d3cccffd0>
+ Items remaining: 18
+ Next page token: None
+
+Then, for each page you can get all the resources on that page by iterating
+through it or using :func:`list`::
+
+ >>> list(page)
+ [
+ <MyItemClass at 0x7fd64a098ad0>,
+ <MyItemClass at 0x7fd64a098ed0>,
+ <MyItemClass at 0x7fd64a098e90>,
+ ]
+"""
+
+import abc
+
+
+class Page(object):
+ """Single page of results in an iterator.
+
+ Args:
+ parent (google.api_core.page_iterator.Iterator): The iterator that owns
+ the current page.
+ items (Sequence[Any]): An iterable (that also defines __len__) of items
+ from a raw API response.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+        raw_page (Optional[google.protobuf.message.Message]):
+ The raw page response.
+ """
+
+ def __init__(self, parent, items, item_to_value, raw_page=None):
+ self._parent = parent
+ self._num_items = len(items)
+ self._remaining = self._num_items
+ self._item_iter = iter(items)
+ self._item_to_value = item_to_value
+ self._raw_page = raw_page
+
+ @property
+ def raw_page(self):
+        """google.protobuf.message.Message: The raw page response."""
+ return self._raw_page
+
+ @property
+ def num_items(self):
+ """int: Total items in the page."""
+ return self._num_items
+
+ @property
+ def remaining(self):
+ """int: Remaining items in the page."""
+ return self._remaining
+
+ def __iter__(self):
+ """The :class:`Page` is an iterator of items."""
+ return self
+
+ def __next__(self):
+ """Get the next value in the page."""
+ item = next(self._item_iter)
+ result = self._item_to_value(self._parent, item)
+ # Since we've successfully got the next value from the
+ # iterator, we update the number of remaining.
+ self._remaining -= 1
+ return result
+
+
+def _item_to_value_identity(iterator, item):
+    """An item to value transformer that returns the item unchanged."""
+ # pylint: disable=unused-argument
+ # We are conforming to the interface defined by Iterator.
+ return item
+
+
+class Iterator(object, metaclass=abc.ABCMeta):
+ """A generic class for iterating through API list responses.
+
+ Args:
+        client (google.cloud.client.Client): The API client.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
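+
+    A subclass only needs to implement :meth:`_next_page`. A minimal sketch
+    (``fetch_page`` is a hypothetical helper that returns a parsed response
+    dict, or :data:`None` when the pages are exhausted)::
+
+        class MyIterator(Iterator):
+            def _next_page(self):
+                response = fetch_page(self.next_page_token)
+                if response is None:
+                    return None
+                self.next_page_token = response.get('nextPageToken')
+                return Page(self, response['items'], self.item_to_value)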
+ """
+
+ def __init__(
+ self,
+ client,
+ item_to_value=_item_to_value_identity,
+ page_token=None,
+ max_results=None,
+ ):
+ self._started = False
+ self.__active_iterator = None
+
+ self.client = client
+ """Optional[Any]: The client that created this iterator."""
+ self.item_to_value = item_to_value
+        """Callable[Iterator, Any]: Callable to convert an item from the type
+        in the raw API response into the native object. Will be called with
+        the iterator and a single item.
+        """
+ self.max_results = max_results
+        """int: The maximum number of results to fetch."""
+
+ # The attributes below will change over the life of the iterator.
+ self.page_number = 0
+ """int: The current page of results."""
+ self.next_page_token = page_token
+ """str: The token for the next page of results. If this is set before
+ the iterator starts, it effectively offsets the iterator to a
+ specific starting point."""
+ self.num_results = 0
+ """int: The total number of results fetched so far."""
+
+ @property
+ def pages(self):
+ """Iterator of pages in the response.
+
+        Returns:
+ types.GeneratorType[google.api_core.page_iterator.Page]: A
+ generator of page instances.
+
+        Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._page_iter(increment=True)
+
+ def _items_iter(self):
+ """Iterator for each item returned."""
+ for page in self._page_iter(increment=False):
+ for item in page:
+ self.num_results += 1
+ yield item
+
+ def __iter__(self):
+ """Iterator for each item returned.
+
+ Returns:
+ types.GeneratorType[Any]: A generator of items from the API.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._items_iter()
+
+ def __next__(self):
+ if self.__active_iterator is None:
+ self.__active_iterator = iter(self)
+ return next(self.__active_iterator)
+
+ def _page_iter(self, increment):
+ """Generator of pages of API responses.
+
+ Args:
+ increment (bool): Flag indicating if the total number of results
+ should be incremented on each page. This is useful since a page
+ iterator will want to increment by results per page while an
+ items iterator will want to increment per item.
+
+ Yields:
+ Page: each page of items from the API.
+ """
+ page = self._next_page()
+ while page is not None:
+ self.page_number += 1
+ if increment:
+ self.num_results += page.num_items
+ yield page
+ page = self._next_page()
+
+ @abc.abstractmethod
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+        This method is abstract and must be overridden by subclasses
+ to return the next :class:`Page`.
+
+ Raises:
+ NotImplementedError: Always, this method is abstract.
+ """
+ raise NotImplementedError
+
+
+def _do_nothing_page_start(iterator, page, response):
+ """Helper to provide custom behavior after a :class:`Page` is started.
+
+ This is a do-nothing stand-in as the default value.
+
+ Args:
+ iterator (Iterator): An iterator that holds some request info.
+ page (Page): The page that was just created.
+ response (Any): The API response for a page.
+ """
+ # pylint: disable=unused-argument
+ pass
+
+
+class HTTPIterator(Iterator):
+ """A generic class for iterating through HTTP/JSON API list responses.
+
+ To make an iterator work, you'll need to provide a way to convert a JSON
+ item returned from the API into the object of your choice (via
+ ``item_to_value``). You also may need to specify a custom ``items_key`` so
+ that a given response (containing a page of results) can be parsed into an
+ iterable page of the actual objects you want.
+
+ Args:
+ client (google.cloud.client.Client): The API client.
+ api_request (Callable): The function to use to make API requests.
+ Generally, this will be
+ :meth:`google.cloud._http.JSONConnection.api_request`.
+ path (str): The method path to query for the list of items.
+ item_to_value (Callable[google.api_core.page_iterator.Iterator, Any]):
+ Callable to convert an item from the type in the JSON response into
+ a native object. Will be called with the iterator and a single
+ item.
+ items_key (str): The key in the API response where the list of items
+ can be found.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+        page_size (int): The maximum number of results to fetch per page.
+        max_results (int): The maximum number of results to fetch.
+ extra_params (dict): Extra query string parameters for the
+ API call.
+ page_start (Callable[
+ google.api_core.page_iterator.Iterator,
+ google.api_core.page_iterator.Page, dict]): Callable to provide
+ any special behavior after a new page has been created. Assumed
+ signature takes the :class:`.Iterator` that started the page,
+ the :class:`.Page` that was started and the dictionary containing
+ the page response.
+ next_token (str): The name of the field used in the response for page
+ tokens.
+
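+    A minimal usage sketch (``client`` and ``api_request`` here are
+    hypothetical stand-ins normally supplied by a client library)::
+
+        iterator = HTTPIterator(
+            client,
+            api_request,
+            path='/books',
+            item_to_value=lambda iterator, item: item)
+        for book in iterator:
+            print(book)
+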
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_ITEMS_KEY = "items"
+ _PAGE_TOKEN = "pageToken"
+ _MAX_RESULTS = "maxResults"
+ _NEXT_TOKEN = "nextPageToken"
+ _RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
+ _HTTP_METHOD = "GET"
+
+ def __init__(
+ self,
+ client,
+ api_request,
+ path,
+ item_to_value,
+ items_key=_DEFAULT_ITEMS_KEY,
+ page_token=None,
+ page_size=None,
+ max_results=None,
+ extra_params=None,
+ page_start=_do_nothing_page_start,
+ next_token=_NEXT_TOKEN,
+ ):
+ super(HTTPIterator, self).__init__(
+ client, item_to_value, page_token=page_token, max_results=max_results
+ )
+ self.api_request = api_request
+ self.path = path
+ self._items_key = items_key
+ self.extra_params = extra_params
+ self._page_size = page_size
+ self._page_start = page_start
+ self._next_token = next_token
+ # Verify inputs / provide defaults.
+ if self.extra_params is None:
+ self.extra_params = {}
+ self._verify_params()
+
+ def _verify_params(self):
+ """Verifies the parameters don't use any reserved parameter.
+
+ Raises:
+ ValueError: If a reserved parameter is used.
+ """
+ reserved_in_use = self._RESERVED_PARAMS.intersection(self.extra_params)
+ if reserved_in_use:
+ raise ValueError("Using a reserved parameter", reserved_in_use)
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+ Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if self._has_next_page():
+ response = self._get_next_page_response()
+ items = response.get(self._items_key, ())
+ page = Page(self, items, self.item_to_value, raw_page=response)
+ self._page_start(self, page, response)
+ self.next_page_token = response.get(self._next_token)
+ return page
+ else:
+ return None
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ return self.next_page_token is not None
+
+ def _get_query_params(self):
+ """Getter for query parameters for the next request.
+
+ Returns:
+ dict: A dictionary of query parameters.
+ """
+ result = {}
+ if self.next_page_token is not None:
+ result[self._PAGE_TOKEN] = self.next_page_token
+
+ page_size = None
+ if self.max_results is not None:
+ page_size = self.max_results - self.num_results
+ if self._page_size is not None:
+ page_size = min(page_size, self._page_size)
+ elif self._page_size is not None:
+ page_size = self._page_size
+
+ if page_size is not None:
+ result[self._MAX_RESULTS] = page_size
+
+ result.update(self.extra_params)
+ return result
+
+ def _get_next_page_response(self):
+ """Requests the next page from the path provided.
+
+ Returns:
+ dict: The parsed JSON response of the next page's contents.
+
+ Raises:
+ ValueError: If the HTTP method is not ``GET`` or ``POST``.
+ """
+ params = self._get_query_params()
+ if self._HTTP_METHOD == "GET":
+ return self.api_request(
+ method=self._HTTP_METHOD, path=self.path, query_params=params
+ )
+ elif self._HTTP_METHOD == "POST":
+ return self.api_request(
+ method=self._HTTP_METHOD, path=self.path, data=params
+ )
+ else:
+ raise ValueError("Unexpected HTTP method", self._HTTP_METHOD)
+
+
+class _GAXIterator(Iterator):
+ """A generic class for iterating through Cloud gRPC APIs list responses.
+
+    Args:
+ client (google.cloud.client.Client): The API client.
+ page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped
+ to conform to the :class:`Iterator` interface.
+ item_to_value (Callable[Iterator, Any]): Callable to convert an item
+            from the protobuf response into a native object. Will
+ be called with the iterator and a single item.
+ max_results (int): The maximum number of results to fetch.
+
+ .. autoattribute:: pages
+ """
+
+ def __init__(self, client, page_iter, item_to_value, max_results=None):
+ super(_GAXIterator, self).__init__(
+ client,
+ item_to_value,
+ page_token=page_iter.page_token,
+ max_results=max_results,
+ )
+ self._gax_page_iter = page_iter
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Wraps the response from the :class:`~google.gax.PageIterator` in a
+ :class:`Page` instance and captures some state at each page.
+
+ Returns:
+ Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ try:
+ items = next(self._gax_page_iter)
+ page = Page(self, items, self.item_to_value)
+ self.next_page_token = self._gax_page_iter.page_token or None
+ return page
+ except StopIteration:
+ return None
+
+
+class GRPCIterator(Iterator):
+ """A generic class for iterating through gRPC list responses.
+
+ .. note:: The class does not take a ``page_token`` argument because it can
+ just be specified in the ``request``.
+
+ Args:
+        client (google.cloud.client.Client): The API client. This is unused by
+ this class, but kept to satisfy the :class:`Iterator` interface.
+ method (Callable[protobuf.Message]): A bound gRPC method that should
+ take a single message for the request.
+ request (protobuf.Message): The request message.
+ items_field (str): The field in the response message that has the
+ items for the page.
+ item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
+            item from the type in the protobuf response into a native object. Will
+ be called with the iterator and a single item.
+ request_token_field (str): The field in the request message used to
+ specify the page token.
+ response_token_field (str): The field in the response message that has
+ the token for the next page.
+ max_results (int): The maximum number of results to fetch.
+
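+    A minimal usage sketch (``client``, ``list_books``, and ``request`` are
+    hypothetical stand-ins normally supplied by a generated client)::
+
+        iterator = GRPCIterator(
+            client, list_books, request, items_field='books')
+        for book in iterator:
+            print(book)
+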
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+ _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
+
+ def __init__(
+ self,
+ client,
+ method,
+ request,
+ items_field,
+ item_to_value=_item_to_value_identity,
+ request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+ response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+ max_results=None,
+ ):
+ super(GRPCIterator, self).__init__(
+ client, item_to_value, max_results=max_results
+ )
+ self._method = method
+ self._request = request
+ self._items_field = items_field
+ self._request_token_field = request_token_field
+ self._response_token_field = response_token_field
+
+ def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+            Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if not self._has_next_page():
+ return None
+
+ if self.next_page_token is not None:
+ setattr(self._request, self._request_token_field, self.next_page_token)
+
+ response = self._method(self._request)
+
+ self.next_page_token = getattr(response, self._response_token_field)
+ items = getattr(response, self._items_field)
+ page = Page(self, items, self.item_to_value, raw_page=response)
+
+ return page
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+ if self.max_results is not None:
+ if self.num_results >= self.max_results:
+ return False
+
+ # Note: intentionally a falsy check instead of a None check. The RPC
+ # can return an empty string indicating no more pages.
+        return bool(self.next_page_token)
diff --git a/google/api_core/page_iterator_async.py b/google/api_core/page_iterator_async.py
new file mode 100644
index 0000000..c072575
--- /dev/null
+++ b/google/api_core/page_iterator_async.py
@@ -0,0 +1,285 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AsyncIO iterators for paging through paged API methods.
+
+These iterators simplify the process of paging through API responses
+where the request takes a page token and the response is a list of results with
+a token for the next page. See `list pagination`_ in the Google API Style Guide
+for more details.
+
+.. _list pagination:
+ https://cloud.google.com/apis/design/design_patterns#list_pagination
+
+API clients that have methods that follow the list pagination pattern can
+return an :class:`.AsyncIterator`::
+
+ >>> results_iterator = await client.list_resources()
+
+Or you can walk your way through items and call off the search early if
+you find what you're looking for (resulting in possibly fewer requests)::
+
+ >>> async for resource in results_iterator:
+ ... print(resource.name)
+ ... if not resource.is_valid:
+ ... break
+
+At any point, you may check the number of items consumed by referencing the
+``num_results`` property of the iterator::
+
+ >>> async for my_item in results_iterator:
+ ... if results_iterator.num_results >= 10:
+ ... break
+
+When iterating, not every new item will send a request to the server.
+To iterate based on each page of items (where a page corresponds to
+a request)::
+
+ >>> async for page in results_iterator.pages:
+ ... print('=' * 20)
+    ...     print('   Page number: {:d}'.format(results_iterator.page_number))
+ ... print(' Items in page: {:d}'.format(page.num_items))
+ ... print(' First item: {!r}'.format(next(page)))
+ ... print('Items remaining: {:d}'.format(page.remaining))
+    ...     print('Next page token: {}'.format(results_iterator.next_page_token))
+ ====================
+ Page number: 1
+ Items in page: 1
+ First item: <MyItemClass at 0x7f1d3cccf690>
+ Items remaining: 0
+ Next page token: eav1OzQB0OM8rLdGXOEsyQWSG
+ ====================
+ Page number: 2
+ Items in page: 19
+ First item: <MyItemClass at 0x7f1d3cccffd0>
+ Items remaining: 18
+ Next page token: None
+"""
+
+import abc
+
+from google.api_core.page_iterator import Page
+
+
+def _item_to_value_identity(iterator, item):
+    """An item to value transformer that returns the item unchanged."""
+ # pylint: disable=unused-argument
+ # We are conforming to the interface defined by Iterator.
+ return item
+
+
+class AsyncIterator(abc.ABC):
+ """A generic class for iterating through API list responses.
+
+ Args:
+        client (google.cloud.client.Client): The API client.
+ item_to_value (Callable[google.api_core.page_iterator_async.AsyncIterator, Any]):
+ Callable to convert an item from the type in the raw API response
+ into the native object. Will be called with the iterator and a
+ single item.
+ page_token (str): A token identifying a page in a result set to start
+ fetching results from.
+ max_results (int): The maximum number of results to fetch.
+ """
+
+ def __init__(
+ self,
+ client,
+ item_to_value=_item_to_value_identity,
+ page_token=None,
+ max_results=None,
+ ):
+ self._started = False
+ self.__active_aiterator = None
+
+ self.client = client
+ """Optional[Any]: The client that created this iterator."""
+ self.item_to_value = item_to_value
+        """Callable[Iterator, Any]: Callable to convert an item from the type
+        in the raw API response into the native object. Will be called with
+        the iterator and a single item.
+        """
+ self.max_results = max_results
+ """int: The maximum number of results to fetch."""
+
+ # The attributes below will change over the life of the iterator.
+ self.page_number = 0
+ """int: The current page of results."""
+ self.next_page_token = page_token
+ """str: The token for the next page of results. If this is set before
+ the iterator starts, it effectively offsets the iterator to a
+ specific starting point."""
+ self.num_results = 0
+ """int: The total number of results fetched so far."""
+
+ @property
+ def pages(self):
+ """Iterator of pages in the response.
+
+        Returns:
+            types.AsyncGeneratorType[google.api_core.page_iterator.Page]: An
+                async generator of page instances.
+
+        Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._page_aiter(increment=True)
+
+ async def _items_aiter(self):
+ """Iterator for each item returned."""
+ async for page in self._page_aiter(increment=False):
+ for item in page:
+ self.num_results += 1
+ yield item
+
+ def __aiter__(self):
+ """Iterator for each item returned.
+
+ Returns:
+            types.AsyncGeneratorType[Any]: An async generator of items from the API.
+
+ Raises:
+ ValueError: If the iterator has already been started.
+ """
+ if self._started:
+ raise ValueError("Iterator has already started", self)
+ self._started = True
+ return self._items_aiter()
+
+ async def __anext__(self):
+ if self.__active_aiterator is None:
+ self.__active_aiterator = self.__aiter__()
+ return await self.__active_aiterator.__anext__()
+
+ async def _page_aiter(self, increment):
+ """Generator of pages of API responses.
+
+ Args:
+ increment (bool): Flag indicating if the total number of results
+ should be incremented on each page. This is useful since a page
+ iterator will want to increment by results per page while an
+ items iterator will want to increment per item.
+
+ Yields:
+ Page: each page of items from the API.
+ """
+ page = await self._next_page()
+ while page is not None:
+ self.page_number += 1
+ if increment:
+ self.num_results += page.num_items
+ yield page
+ page = await self._next_page()
+
+ @abc.abstractmethod
+ async def _next_page(self):
+ """Get the next page in the iterator.
+
+        This method is abstract and must be overridden by subclasses
+ to return the next :class:`Page`.
+
+ Raises:
+ NotImplementedError: Always, this method is abstract.
+ """
+ raise NotImplementedError
+
+
+class AsyncGRPCIterator(AsyncIterator):
+ """A generic class for iterating through gRPC list responses.
+
+ .. note:: The class does not take a ``page_token`` argument because it can
+ just be specified in the ``request``.
+
+ Args:
+        client (google.cloud.client.Client): The API client. This is unused by
+ this class, but kept to satisfy the :class:`Iterator` interface.
+ method (Callable[protobuf.Message]): A bound gRPC method that should
+ take a single message for the request.
+ request (protobuf.Message): The request message.
+ items_field (str): The field in the response message that has the
+ items for the page.
+ item_to_value (Callable[GRPCIterator, Any]): Callable to convert an
+            item from the type in the protobuf response into a native object. Will
+ be called with the iterator and a single item.
+ request_token_field (str): The field in the request message used to
+ specify the page token.
+ response_token_field (str): The field in the response message that has
+ the token for the next page.
+ max_results (int): The maximum number of results to fetch.
+
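+    A minimal usage sketch (``client``, ``list_books``, and ``request`` are
+    hypothetical stand-ins normally supplied by a generated client)::
+
+        iterator = AsyncGRPCIterator(
+            client, list_books, request, items_field='books')
+        async for book in iterator:
+            print(book)
+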
+ .. autoattribute:: pages
+ """
+
+ _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+ _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
+
+ def __init__(
+ self,
+ client,
+ method,
+ request,
+ items_field,
+ item_to_value=_item_to_value_identity,
+ request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+ response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+ max_results=None,
+ ):
+ super().__init__(client, item_to_value, max_results=max_results)
+ self._method = method
+ self._request = request
+ self._items_field = items_field
+ self._request_token_field = request_token_field
+ self._response_token_field = response_token_field
+
+ async def _next_page(self):
+ """Get the next page in the iterator.
+
+ Returns:
+            Optional[Page]: The next page in the iterator or :data:`None` if
+ there are no pages left.
+ """
+ if not self._has_next_page():
+ return None
+
+ if self.next_page_token is not None:
+ setattr(self._request, self._request_token_field, self.next_page_token)
+
+ response = await self._method(self._request)
+
+ self.next_page_token = getattr(response, self._response_token_field)
+ items = getattr(response, self._items_field)
+ page = Page(self, items, self.item_to_value, raw_page=response)
+
+ return page
+
+ def _has_next_page(self):
+ """Determines whether or not there are more pages with results.
+
+ Returns:
+ bool: Whether the iterator has more pages.
+ """
+ if self.page_number == 0:
+ return True
+
+        if self.max_results is not None:
+            if self.num_results >= self.max_results:
+                return False
+
+        # Note: intentionally a falsy check instead of a None check. The RPC
+        # can return an empty string indicating no more pages.
+        return bool(self.next_page_token)
diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py
new file mode 100644
index 0000000..41fbd4f
--- /dev/null
+++ b/google/api_core/path_template.py
@@ -0,0 +1,300 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Expand and validate URL path templates.
+
+This module provides the :func:`expand` and :func:`validate` functions for
+interacting with Google-style URL `path templates`_ which are commonly used
+in Google APIs for `resource names`_.
+
+.. _path templates: https://github.com/googleapis/googleapis/blob
+ /57e2d376ac7ef48681554204a3ba78a414f2c533/google/api/http.proto#L212
+.. _resource names: https://cloud.google.com/apis/design/resource_names
+"""
+
+from __future__ import unicode_literals
+
+from collections import deque
+import copy
+import functools
+import re
+
+# Regular expression for extracting variable parts from a path template.
+# The variables can be expressed as:
+#
+# - "*": a single-segment positional variable, for example: "books/*"
+# - "**": a multi-segment positional variable, for example: "shelf/**/book/*"
+# - "{name}": a single-segment wildcard named variable, for example
+# "books/{name}"
+# - "{name=*}": same as above.
+# - "{name=**}": a multi-segment wildcard named variable, for example
+# "shelf/{name=**}"
+# - "{name=/path/*/**}": a multi-segment named variable with a sub-template.
+_VARIABLE_RE = re.compile(
+ r"""
+ ( # Capture the entire variable expression
+ (?P<positional>\*\*?) # Match & capture * and ** positional variables.
+ |
+ # Match & capture named variables {name}
+ {
+ (?P<name>[^/]+?)
+ # Optionally match and capture the named variable's template.
+ (?:=(?P<template>.+?))?
+ }
+ )
+ """,
+ re.VERBOSE,
+)
+
+# Segment expressions used for validating paths against a template.
+_SINGLE_SEGMENT_PATTERN = r"([^/]+)"
+_MULTI_SEGMENT_PATTERN = r"(.+)"
+
+
+def _expand_variable_match(positional_vars, named_vars, match):
+ """Expand a matched variable with its value.
+
+ Args:
+ positional_vars (list): A list of positional variables. This list will
+ be modified.
+ named_vars (dict): A dictionary of named variables.
+ match (re.Match): A regular expression match.
+
+ Returns:
+ str: The expanded variable to replace the match.
+
+ Raises:
+ ValueError: If a positional or named variable is required by the
+ template but not specified or if an unexpected template expression
+ is encountered.
+ """
+ positional = match.group("positional")
+ name = match.group("name")
+ if name is not None:
+ try:
+ return str(named_vars[name])
+ except KeyError:
+ raise ValueError(
+ "Named variable '{}' not specified and needed by template "
+ "`{}` at position {}".format(name, match.string, match.start())
+ )
+ elif positional is not None:
+ try:
+ return str(positional_vars.pop(0))
+ except IndexError:
+ raise ValueError(
+ "Positional variable not specified and needed by template "
+ "`{}` at position {}".format(match.string, match.start())
+ )
+ else:
+ raise ValueError("Unknown template expression {}".format(match.group(0)))
+
+
+def expand(tmpl, *args, **kwargs):
+ """Expand a path template with the given variables.
+
+ .. code-block:: python
+
+ >>> expand('users/*/messages/*', 'me', '123')
+ users/me/messages/123
+ >>> expand('/v1/{name=shelves/*/books/*}', name='shelves/1/books/3')
+ /v1/shelves/1/books/3
+
+ Args:
+ tmpl (str): The path template.
+ args: The positional variables for the path.
+ kwargs: The named variables for the path.
+
+ Returns:
+ str: The expanded path
+
+ Raises:
+ ValueError: If a positional or named variable is required by the
+ template but not specified or if an unexpected template expression
+ is encountered.
+ """
+ replacer = functools.partial(_expand_variable_match, list(args), kwargs)
+ return _VARIABLE_RE.sub(replacer, tmpl)
+
+
+def _replace_variable_with_pattern(match):
+ """Replace a variable match with a pattern that can be used to validate it.
+
+ Args:
+ match (re.Match): A regular expression match
+
+ Returns:
+ str: A regular expression pattern that can be used to validate the
+ variable in an expanded path.
+
+ Raises:
+ ValueError: If an unexpected template expression is encountered.
+ """
+ positional = match.group("positional")
+ name = match.group("name")
+ template = match.group("template")
+ if name is not None:
+ if not template:
+ return _SINGLE_SEGMENT_PATTERN.format(name)
+ elif template == "**":
+ return _MULTI_SEGMENT_PATTERN.format(name)
+ else:
+ return _generate_pattern_for_template(template)
+ elif positional == "*":
+ return _SINGLE_SEGMENT_PATTERN
+ elif positional == "**":
+ return _MULTI_SEGMENT_PATTERN
+ else:
+ raise ValueError("Unknown template expression {}".format(match.group(0)))
+
+
+def _generate_pattern_for_template(tmpl):
+ """Generate a pattern that can validate a path template.
+
+ Args:
+ tmpl (str): The path template
+
+ Returns:
+ str: A regular expression pattern that can be used to validate an
+ expanded path template.
+ """
+ return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl)
+
+
+def get_field(request, field):
+ """Get the value of a field from a given dictionary.
+
+ Args:
+ request (dict): A dictionary object.
+ field (str): The key to the request in dot notation.
+
+ Returns:
+ The value of the field.
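+
+    For example::
+
+        >>> get_field({'a': {'b': 1}}, 'a.b')
+        1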
+ """
+ parts = field.split(".")
+ value = request
+ for part in parts:
+ if not isinstance(value, dict):
+ return
+ value = value.get(part)
+ if isinstance(value, dict):
+ return
+ return value
+
+
+def delete_field(request, field):
+ """Delete the value of a field from a given dictionary.
+
+ Args:
+ request (dict): A dictionary object.
+ field (str): The key to the request in dot notation.
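+
+    For example::
+
+        >>> request = {'a': {'b': 1, 'c': 2}}
+        >>> delete_field(request, 'a.b')
+        >>> request
+        {'a': {'c': 2}}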
+ """
+ parts = deque(field.split("."))
+ while len(parts) > 1:
+ if not isinstance(request, dict):
+ return
+ part = parts.popleft()
+ request = request.get(part)
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ return
+ request.pop(part, None)
+
+
+def validate(tmpl, path):
+ """Validate a path against the path template.
+
+ .. code-block:: python
+
+ >>> validate('users/*/messages/*', 'users/me/messages/123')
+ True
+ >>> validate('users/*/messages/*', 'users/me/drafts/123')
+ False
+        >>> validate('/v1/{name=shelves/*/books/*}', '/v1/shelves/1/books/3')
+        True
+        >>> validate('/v1/{name=shelves/*/books/*}', '/v1/shelves/1/tapes/3')
+        False
+
+ Args:
+ tmpl (str): The path template.
+ path (str): The expanded path.
+
+ Returns:
+ bool: True if the path matches.
+ """
+ pattern = _generate_pattern_for_template(tmpl) + "$"
+    return re.match(pattern, path) is not None
+
+
+def transcode(http_options, **request_kwargs):
+    """Transcodes a gRPC request pattern into a proper HTTP request, following
+    the rules outlined at
+ https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
+
+ Args:
+        http_options (list(dict)): A list of dicts, each of which contains these keys,
+ 'method' (str): The http method
+ 'uri' (str): The path template
+ 'body' (str): The body field name (optional)
+ (This is a simplified representation of the proto option `google.api.http`)
+
+        request_kwargs (dict): A dict representing the request object
+
+ Returns:
+ dict: The transcoded request with these keys,
+ 'method' (str) : The http method
+ 'uri' (str) : The expanded uri
+ 'body' (dict) : A dict representing the body (optional)
+ 'query_params' (dict) : A dict mapping query parameter variables and values
+
+ Raises:
+ ValueError: If the request does not match the given template.
+ """
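+
+    A minimal sketch (the option and request below are hypothetical)::
+
+        >>> http_options = [
+        ...     {'method': 'post', 'uri': '/v1/{name=shelves/*}', 'body': 'shelf'}]
+        >>> transcode(http_options, name='shelves/1', shelf={'theme': 'books'})
+        {'uri': '/v1/shelves/1', 'body': {'theme': 'books'}, 'query_params': {}, 'method': 'post'}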
+    """
+    for http_option in http_options:
+ request = {}
+
+ # Assign path
+ uri_template = http_option["uri"]
+ path_fields = [
+ match.group("name") for match in _VARIABLE_RE.finditer(uri_template)
+ ]
+ path_args = {field: get_field(request_kwargs, field) for field in path_fields}
+ request["uri"] = expand(uri_template, **path_args)
+
+ # Remove fields used in uri path from request
+ leftovers = copy.deepcopy(request_kwargs)
+ for path_field in path_fields:
+ delete_field(leftovers, path_field)
+
+ if not validate(uri_template, request["uri"]) or not all(path_args.values()):
+ continue
+
+ # Assign body and query params
+ body = http_option.get("body")
+
+ if body:
+ if body == "*":
+ request["body"] = leftovers
+ request["query_params"] = {}
+ else:
+ try:
+ request["body"] = leftovers.pop(body)
+ except KeyError:
+ continue
+ request["query_params"] = leftovers
+ else:
+ request["query_params"] = leftovers
+ request["method"] = http_option["method"]
+ return request
+
+ raise ValueError("Request obj does not match any template")
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
new file mode 100644
index 0000000..896e89c
--- /dev/null
+++ b/google/api_core/protobuf_helpers.py
@@ -0,0 +1,373 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for :mod:`protobuf`."""
+
+import collections
+import collections.abc
+import copy
+import inspect
+
+from google.protobuf import field_mask_pb2
+from google.protobuf import message
+from google.protobuf import wrappers_pb2
+
+
+_SENTINEL = object()
+_WRAPPER_TYPES = (
+ wrappers_pb2.BoolValue,
+ wrappers_pb2.BytesValue,
+ wrappers_pb2.DoubleValue,
+ wrappers_pb2.FloatValue,
+ wrappers_pb2.Int32Value,
+ wrappers_pb2.Int64Value,
+ wrappers_pb2.StringValue,
+ wrappers_pb2.UInt32Value,
+ wrappers_pb2.UInt64Value,
+)
+
+
+def from_any_pb(pb_type, any_pb):
+ """Converts an ``Any`` protobuf to the specified message type.
+
+ Args:
+ pb_type (type): the type of the message that any_pb stores an instance
+ of.
+ any_pb (google.protobuf.any_pb2.Any): the object to be converted.
+
+ Returns:
+ pb_type: An instance of the pb_type message.
+
+ Raises:
+ TypeError: if the message could not be converted.
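+
+    Example (a sketch; ``Book`` is a hypothetical generated message type)::
+
+        >>> from google.protobuf import any_pb2
+        >>> any_pb = any_pb2.Any()
+        >>> any_pb.Pack(Book(title='x'))
+        >>> from_any_pb(Book, any_pb).title
+        'x'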
+ """
+ msg = pb_type()
+
+ # Unwrap proto-plus wrapped messages.
+ if callable(getattr(pb_type, "pb", None)):
+ msg_pb = pb_type.pb(msg)
+ else:
+ msg_pb = msg
+
+ # Unpack the Any object and populate the protobuf message instance.
+ if not any_pb.Unpack(msg_pb):
+ raise TypeError(
+ "Could not convert {} to {}".format(
+ any_pb.__class__.__name__, pb_type.__name__
+ )
+ )
+
+ # Done; return the message.
+ return msg
+
+
+def check_oneof(**kwargs):
+ """Raise ValueError if more than one keyword argument is not ``None``.
+
+ Args:
+ kwargs (dict): The keyword arguments sent to the function.
+
+ Raises:
+ ValueError: If more than one entry in ``kwargs`` is not ``None``.
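+
+    For example::
+
+        >>> check_oneof(foo=None, bar='a')  # OK: only one argument is set.
+        >>> check_oneof(foo='a', bar='b')
+        Traceback (most recent call last):
+          ...
+        ValueError: Only one of bar, foo should be set.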
+ """
+ # Sanity check: If no keyword arguments were sent, this is fine.
+ if not kwargs:
+ return
+
+ not_nones = [val for val in kwargs.values() if val is not None]
+ if len(not_nones) > 1:
+ raise ValueError(
+ "Only one of {fields} should be set.".format(
+ fields=", ".join(sorted(kwargs.keys()))
+ )
+ )
+
+
+def get_messages(module):
+ """Discovers all protobuf Message classes in a given import module.
+
+ Args:
+ module (module): A Python module; :func:`dir` will be run against this
+ module to find Message subclasses.
+
+ Returns:
+ dict[str, google.protobuf.message.Message]: A dictionary with the
+ Message class names as keys, and the Message subclasses themselves
+ as values.
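+
+    Example (a sketch using a standard protobuf module)::
+
+        >>> from google.protobuf import timestamp_pb2
+        >>> list(get_messages(timestamp_pb2))
+        ['Timestamp']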
+ """
+ answer = collections.OrderedDict()
+ for name in dir(module):
+ candidate = getattr(module, name)
+ if inspect.isclass(candidate) and issubclass(candidate, message.Message):
+ answer[name] = candidate
+ return answer
+
+
+def _resolve_subkeys(key, separator="."):
+ """Resolve a potentially nested key.
+
+ If the key contains the ``separator`` (e.g. ``.``) then the key will be
+ split on the first instance of the subkey::
+
+ >>> _resolve_subkeys('a.b.c')
+ ('a', 'b.c')
+ >>> _resolve_subkeys('d|e|f', separator='|')
+ ('d', 'e|f')
+
+ If not, the subkey will be :data:`None`::
+
+ >>> _resolve_subkeys('foo')
+ ('foo', None)
+
+ Args:
+ key (str): A string that may or may not contain the separator.
+ separator (str): The namespace separator. Defaults to `.`.
+
+ Returns:
+ Tuple[str, str]: The key and subkey(s).
+ """
+ parts = key.split(separator, 1)
+
+ if len(parts) > 1:
+ return parts
+ else:
+ return parts[0], None
+
+
+def get(msg_or_dict, key, default=_SENTINEL):
+ """Retrieve a key's value from a protobuf Message or dictionary.
+
+ Args:
+        msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key to retrieve from the object.
+ default (Any): If the key is not present on the object, and a default
+ is set, returns that default instead. A type-appropriate falsy
+ default is generally recommended, as protobuf messages almost
+ always have default values for unset values and it is not always
+ possible to tell the difference between a falsy value and an
+ unset one. If no default is set then :class:`KeyError` will be
+ raised if the key is not present in the object.
+
+ Returns:
+ Any: The return value from the underlying Message or dict.
+
+ Raises:
+ KeyError: If the key is not found. Note that, for unset values,
+ messages and dictionaries may not have consistent behavior.
+ TypeError: If ``msg_or_dict`` is not a Message or Mapping.
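+
+    For example, with a plain dictionary::
+
+        >>> get({'a': {'b': 1}}, 'a.b')
+        1
+        >>> get({'a': {'b': 1}}, 'a.c', default=0)
+        0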
+ """
+ # We may need to get a nested key. Resolve this.
+ key, subkey = _resolve_subkeys(key)
+
+ # Attempt to get the value from the two types of objects we know about.
+ # If we get something else, complain.
+ if isinstance(msg_or_dict, message.Message):
+ answer = getattr(msg_or_dict, key, default)
+ elif isinstance(msg_or_dict, collections.abc.Mapping):
+ answer = msg_or_dict.get(key, default)
+ else:
+ raise TypeError(
+ "get() expected a dict or protobuf message, got {!r}.".format(
+ type(msg_or_dict)
+ )
+ )
+
+ # If the object we got back is our sentinel, raise KeyError; this is
+ # a "not found" case.
+ if answer is _SENTINEL:
+ raise KeyError(key)
+
+ # If a subkey exists, call this method recursively against the answer.
+ if subkey is not None and answer is not default:
+ return get(answer, subkey, default=default)
+
+ return answer
+
+
+def _set_field_on_message(msg, key, value):
+ """Set helper for protobuf Messages."""
+ # Attempt to set the value on the types of objects we know how to deal
+ # with.
+ if isinstance(value, (collections.abc.MutableSequence, tuple)):
+ # Clear the existing repeated protobuf message of any elements
+ # currently inside it.
+ while getattr(msg, key):
+ getattr(msg, key).pop()
+
+ # Write our new elements to the repeated field.
+ for item in value:
+ if isinstance(item, collections.abc.Mapping):
+ getattr(msg, key).add(**item)
+ else:
+ # protobuf's RepeatedCompositeContainer doesn't support
+ # append.
+ getattr(msg, key).extend([item])
+ elif isinstance(value, collections.abc.Mapping):
+ # Assign the dictionary values to the protobuf message.
+ for item_key, item_value in value.items():
+ set(getattr(msg, key), item_key, item_value)
+ elif isinstance(value, message.Message):
+ getattr(msg, key).CopyFrom(value)
+ else:
+ setattr(msg, key, value)
+
+
+def set(msg_or_dict, key, value):
+ """Set a key's value on a protobuf Message or dictionary.
+
+ Args:
+ msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key to set.
+ value (Any): The value to set.
+
+ Raises:
+ TypeError: If ``msg_or_dict`` is not a Message or dictionary.
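+
+    For example, with a plain dictionary::
+
+        >>> d = {}
+        >>> set(d, 'a.b', 1)
+        >>> d
+        {'a': {'b': 1}}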
+ """
+ # Sanity check: Is our target object valid?
+ if not isinstance(msg_or_dict, (collections.abc.MutableMapping, message.Message)):
+ raise TypeError(
+ "set() expected a dict or protobuf message, got {!r}.".format(
+ type(msg_or_dict)
+ )
+ )
+
+ # We may be setting a nested key. Resolve this.
+ basekey, subkey = _resolve_subkeys(key)
+
+ # If a subkey exists, then get that object and call this method
+ # recursively against it using the subkey.
+ if subkey is not None:
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
+ msg_or_dict.setdefault(basekey, {})
+ set(get(msg_or_dict, basekey), subkey, value)
+ return
+
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
+ msg_or_dict[key] = value
+ else:
+ _set_field_on_message(msg_or_dict, key, value)
+
+
+def setdefault(msg_or_dict, key, value):
+ """Set the key on a protobuf Message or dictionary to a given value if the
+ current value is falsy.
+
+ Because protobuf Messages do not distinguish between unset values and
+ falsy ones particularly well (by design), this method treats any falsy
+ value (e.g. 0, empty list) as a target to be overwritten, on both Messages
+ and dictionaries.
+
+ Args:
+ msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
+ object.
+ key (str): The key on the object in question.
+ value (Any): The value to set.
+
+ Raises:
+ TypeError: If ``msg_or_dict`` is not a Message or dictionary.
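+
+    For example, with a plain dictionary::
+
+        >>> d = {'value': 0}
+        >>> setdefault(d, 'value', 42)
+        >>> d
+        {'value': 42}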
+ """
+ if not get(msg_or_dict, key, default=None):
+ set(msg_or_dict, key, value)
+
+
+def field_mask(original, modified):
+ """Create a field mask by comparing two messages.
+
+ Args:
+ original (~google.protobuf.message.Message): the original message.
+            If set to None, this field will be interpreted as an empty
+ message.
+ modified (~google.protobuf.message.Message): the modified message.
+            If set to None, this field will be interpreted as an empty
+ message.
+
+ Returns:
+ google.protobuf.field_mask_pb2.FieldMask: field mask that contains
+ the list of field names that have different values between the two
+ messages. If the messages are equivalent, then the field mask is empty.
+
+ Raises:
+        ValueError: If ``original`` and ``modified`` are not of the same type.
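+
+    Example (a sketch; ``Book`` is a hypothetical generated message with
+    ``title`` and ``author`` string fields)::
+
+        >>> original = Book(title='Title', author='Alice')
+        >>> modified = Book(title='Title', author='Bob')
+        >>> field_mask(original, modified).paths
+        ['author']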
+ """
+ if original is None and modified is None:
+ return field_mask_pb2.FieldMask()
+
+ if original is None and modified is not None:
+ original = copy.deepcopy(modified)
+ original.Clear()
+
+ if modified is None and original is not None:
+ modified = copy.deepcopy(original)
+ modified.Clear()
+
+ if type(original) != type(modified):
+ raise ValueError(
+ "expected that both original and modified should be of the "
+ 'same type, received "{!r}" and "{!r}".'.format(
+ type(original), type(modified)
+ )
+ )
+
+ return field_mask_pb2.FieldMask(paths=_field_mask_helper(original, modified))
+
+
+def _field_mask_helper(original, modified, current=""):
+ answer = []
+
+ for name in original.DESCRIPTOR.fields_by_name:
+ field_path = _get_path(current, name)
+
+ original_val = getattr(original, name)
+ modified_val = getattr(modified, name)
+
+ if _is_message(original_val) or _is_message(modified_val):
+ if original_val != modified_val:
+ # Wrapper types do not need to include the .value part of the
+ # path.
+ if _is_wrapper(original_val) or _is_wrapper(modified_val):
+ answer.append(field_path)
+ elif not modified_val.ListFields():
+ answer.append(field_path)
+ else:
+ answer.extend(
+ _field_mask_helper(original_val, modified_val, field_path)
+ )
+ else:
+ if original_val != modified_val:
+ answer.append(field_path)
+
+ return answer
+
+
+def _get_path(current, name):
+ # gapic-generator-python appends underscores to field names
+ # that collide with python keywords.
+ # `_` is stripped away as it is not possible to
+ # natively define a field with a trailing underscore in protobuf.
+ # APIs will reject field masks if fields have trailing underscores.
+ # See https://github.com/googleapis/python-api-core/issues/227
+ name = name.rstrip("_")
+ if not current:
+ return name
+ return "%s.%s" % (current, name)
+
+
+def _is_message(value):
+ return isinstance(value, message.Message)
+
+
+def _is_wrapper(value):
+ return type(value) in _WRAPPER_TYPES
diff --git a/google/api_core/py.typed b/google/api_core/py.typed
new file mode 100644
index 0000000..1d5517b
--- /dev/null
+++ b/google/api_core/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-api-core package uses inline types.
diff --git a/google/api_core/rest_helpers.py b/google/api_core/rest_helpers.py
new file mode 100644
index 0000000..23fb614
--- /dev/null
+++ b/google/api_core/rest_helpers.py
@@ -0,0 +1,94 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for rest transports."""
+
+import functools
+import operator
+
+
+def flatten_query_params(obj):
+    """Flatten a nested dict into a list of (name, value) tuples.
+
+ The result is suitable for setting query params on an http request.
+
+ .. code-block:: python
+
+ >>> obj = {'a':
+ ... {'b':
+ ... {'c': ['x', 'y', 'z']} },
+ ... 'd': 'uvw', }
+ >>> flatten_query_params(obj)
+ [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw')]
+
+ Note that, as described in
+ https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
+ repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts).
+ This is enforced in this function.
+
+ Args:
+        obj: a nested dictionary (from JSON), or None.
+
+ Returns: a list of tuples, with each tuple having a (possibly) multi-part name
+ and a scalar value.
+
+ Raises:
+        TypeError: if obj is not a dict or None.
+        ValueError: if obj contains a list of non-primitive values.
+ """
+
+ if obj is not None and not isinstance(obj, dict):
+ raise TypeError("flatten_query_params must be called with dict object")
+
+ return _flatten(obj, key_path=[])
+
+
+def _flatten(obj, key_path):
+ if obj is None:
+ return []
+ if isinstance(obj, dict):
+ return _flatten_dict(obj, key_path=key_path)
+ if isinstance(obj, list):
+ return _flatten_list(obj, key_path=key_path)
+ return _flatten_value(obj, key_path=key_path)
+
+
+def _is_primitive_value(obj):
+ if obj is None:
+ return False
+
+ if isinstance(obj, (list, dict)):
+ raise ValueError("query params may not contain repeated dicts or lists")
+
+ return True
+
+
+def _flatten_value(obj, key_path):
+ return [(".".join(key_path), obj)]
+
+
+def _flatten_dict(obj, key_path):
+ items = (_flatten(value, key_path=key_path + [key]) for key, value in obj.items())
+ return functools.reduce(operator.concat, items, [])
+
+
+def _flatten_list(elems, key_path):
+ # Only lists of scalar values are supported.
+ # The name (key_path) is repeated for each value.
+ items = (
+ _flatten_value(elem, key_path=key_path)
+ for elem in elems
+ if _is_primitive_value(elem)
+ )
+ return functools.reduce(operator.concat, items, [])
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
new file mode 100644
index 0000000..bd3a4a6
--- /dev/null
+++ b/google/api_core/retry.py
@@ -0,0 +1,366 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying functions with exponential back-off.
+
+The :class:`Retry` decorator can be used to retry functions that raise
+exceptions using exponential backoff. Because an exponential sleep algorithm is
+used, the retry is limited by a `deadline`. The deadline is the maximum amount
+of time a method can block. This is used instead of total number of retries
+because it is difficult to ascertain the amount of time a function can block
+when using total number of retries and exponential backoff.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry.Retry()
+ def call_flaky_rpc():
+ return client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
+ def check_if_exists():
+ return client.does_thing_exist()
+
+ is_available = check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry.Retry(deadline=60)
+ result = client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import unicode_literals
+
+import datetime
+import functools
+import logging
+import random
+import time
+
+import requests.exceptions
+
+from google.api_core import datetime_helpers
+from google.api_core import exceptions
+from google.auth import exceptions as auth_exceptions
+
+_LOGGER = logging.getLogger(__name__)
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+
+def if_exception_type(*exception_types):
+ """Creates a predicate to check if the exception is of a given type.
+
+ Args:
+ exception_types (Sequence[:func:`type`]): The exception types to check
+ for.
+
+ Returns:
+ Callable[Exception]: A predicate that returns True if the provided
+ exception is of the given type(s).
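+
+    For example::
+
+        >>> predicate = if_exception_type(ValueError)
+        >>> predicate(ValueError())
+        True
+        >>> predicate(TypeError())
+        False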
+ """
+
+ def if_exception_type_predicate(exception):
+ """Bound predicate for checking an exception type."""
+ return isinstance(exception, exception_types)
+
+ return if_exception_type_predicate
+
+
+# pylint: disable=invalid-name
+# Pylint sees this as a constant, but it is also an alias that should be
+# considered a function.
+if_transient_error = if_exception_type(
+ exceptions.InternalServerError,
+ exceptions.TooManyRequests,
+ exceptions.ServiceUnavailable,
+ requests.exceptions.ConnectionError,
+ requests.exceptions.ChunkedEncodingError,
+ auth_exceptions.TransportError,
+)
+"""A predicate that checks if an exception is a transient API error.
+
+The following server errors are considered transient:
+
+- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
+ ``INTERNAL(13)`` and its subclasses.
+- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
+- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
+- :class:`requests.exceptions.ConnectionError`
+- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
+ chunked encoding but sent an invalid chunk.
+- :class:`google.auth.exceptions.TransportError` - Used to indicate an
+ error occurred during an HTTP request.
+"""
+# pylint: enable=invalid-name
+
+
+def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
+ """Generates sleep intervals based on the exponential back-off algorithm.
+
+ This implements the `Truncated Exponential Back-off`_ algorithm.
+
+ .. _Truncated Exponential Back-off:
+ https://cloud.google.com/storage/docs/exponential-backoff
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Yields:
+ float: successive sleep intervals.
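+
+    For example (actual values vary because of jitter)::
+
+        >>> intervals = exponential_sleep_generator(1.0, 60.0)
+        >>> next(intervals)  # uniform in [0.0, 2.0)
+        >>> next(intervals)  # uniform in [0.0, 4.0), capped at 60.0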
+ """
+ delay = initial
+ while True:
+ # Introduce jitter by yielding a delay that is uniformly distributed
+ # to average out to the delay time.
+ yield min(random.uniform(0.0, delay * 2.0), maximum)
+ delay = delay * multiplier
+
+
+def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ deadline (float): How long to keep retrying the target. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ on_error (Callable[Exception]): A function to call while processing a
+ retryable exception. Any error raised by this function will *not*
+ be caught.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ google.api_core.RetryError: If the deadline is exceeded while retrying.
+ ValueError: If the sleep generator stops yielding values.
+        Exception: If the target raises an error that isn't retryable.
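+
+    A minimal sketch (``flaky_call`` is a hypothetical function)::
+
+        retry_target(
+            functools.partial(flaky_call, 'arg'),
+            if_transient_error,
+            exponential_sleep_generator(1.0, 60.0),
+            deadline=120.0)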
+ """
+ if deadline is not None:
+ deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+ seconds=deadline
+ )
+ else:
+ deadline_datetime = None
+
+ last_exc = None
+
+ for sleep in sleep_generator:
+ try:
+ return target()
+
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ if not predicate(exc):
+ raise
+ last_exc = exc
+ if on_error is not None:
+ on_error(exc)
+
+ now = datetime_helpers.utcnow()
+
+ if deadline_datetime is not None:
+ if deadline_datetime <= now:
+ raise exceptions.RetryError(
+ "Deadline of {:.1f}s exceeded while calling {}".format(
+ deadline, target
+ ),
+ last_exc,
+ ) from last_exc
+ else:
+ time_to_deadline = (deadline_datetime - now).total_seconds()
+ sleep = min(time_to_deadline, sleep)
+
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
+ )
+ time.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class Retry(object):
+ """Exponential retry decorator.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ deadline (float): How long to keep retrying in seconds. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ """
+
+ def __init__(
+ self,
+ predicate=if_transient_error,
+ initial=_DEFAULT_INITIAL_DELAY,
+ maximum=_DEFAULT_MAXIMUM_DELAY,
+ multiplier=_DEFAULT_DELAY_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ on_error=None,
+ ):
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._deadline = deadline
+ self._on_error = on_error
+
+ def __call__(self, func, on_error=None):
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
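+        # An ``on_error`` given to the constructor takes precedence over the
+        # one passed here.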
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(*args, **kwargs):
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ self._deadline,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
+
+ @property
+ def deadline(self):
+ return self._deadline
+
+ def with_deadline(self, deadline):
+ """Return a copy of this retry with the given deadline.
+
+ Args:
+ deadline (float): How long to keep retrying.
+
+ Returns:
+ Retry: A new retry instance with the given deadline.
+ """
+ return Retry(
+ predicate=self._predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=deadline,
+ on_error=self._on_error,
+ )
+
+ def with_predicate(self, predicate):
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ Retry: A new retry instance with the given predicate.
+ """
+ return Retry(
+ predicate=predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=self._deadline,
+ on_error=self._on_error,
+ )
+
+ def with_delay(self, initial=None, maximum=None, multiplier=None):
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Returns:
+            Retry: A new retry instance with the given delay options.
+ """
+ return Retry(
+ predicate=self._predicate,
+ initial=initial if initial is not None else self._initial,
+ maximum=maximum if maximum is not None else self._maximum,
+ multiplier=multiplier if multiplier is not None else self._multiplier,
+ deadline=self._deadline,
+ on_error=self._on_error,
+ )
+
+ def __str__(self):
+ return (
+ "<Retry predicate={}, initial={:.1f}, maximum={:.1f}, "
+ "multiplier={:.1f}, deadline={:.1f}, on_error={}>".format(
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._deadline,
+ self._on_error,
+ )
+ )
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py
new file mode 100644
index 0000000..2dfa2f6
--- /dev/null
+++ b/google/api_core/retry_async.py
@@ -0,0 +1,291 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying coroutine functions with exponential back-off.
+
+The :class:`AsyncRetry` decorator shares most functionality and behavior with
+:class:`Retry`, but supports coroutine functions. Please refer to the
+description of :class:`Retry` for more details.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry()
+ async def call_flaky_rpc():
+ return await client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = await call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
+ async def check_if_exists():
+ return await client.does_thing_exist()
+
+ is_available = await check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry_async.AsyncRetry(deadline=60)
+ result = await client.some_method(retry=my_retry)
+
+"""
+
+import asyncio
+import datetime
+import functools
+import logging
+
+from google.api_core import datetime_helpers
+from google.api_core import exceptions
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import if_exception_type # noqa: F401
+from google.api_core.retry import if_transient_error
+
+
+_LOGGER = logging.getLogger(__name__)
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+
+async def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+    higher-level retry helper :class:`AsyncRetry`.
+
+ Args:
+        target (Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ deadline (float): How long to keep retrying the target. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ on_error (Callable[Exception]): A function to call while processing a
+ retryable exception. Any error raised by this function will *not*
+ be caught.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ google.api_core.RetryError: If the deadline is exceeded while retrying.
+ ValueError: If the sleep generator stops yielding values.
+        Exception: If the target raises an exception that isn't retryable.
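+
+    For example, a minimal sketch (``flaky_coro``, a nullary coroutine
+    function, is illustrative):
+
+    .. code-block:: python
+
+        import itertools
+
+        result = await retry_target(
+            flaky_coro,
+            if_exception_type(ValueError),
+            itertools.repeat(1.0),  # sleep 1s between attempts
+            deadline=10.0,
+        )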
+ """
+ deadline_dt = (
+ (datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline))
+ if deadline
+ else None
+ )
+
+ last_exc = None
+
+ for sleep in sleep_generator:
+ try:
+ if not deadline_dt:
+ return await target()
+ else:
+ return await asyncio.wait_for(
+ target(),
+ timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(),
+ )
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError):
+ raise
+ last_exc = exc
+ if on_error is not None:
+ on_error(exc)
+
+ now = datetime_helpers.utcnow()
+
+ if deadline_dt:
+ if deadline_dt <= now:
+ # Chains the raising RetryError with the root cause error,
+                # which helps observability and debuggability.
+ raise exceptions.RetryError(
+ "Deadline of {:.1f}s exceeded while calling {}".format(
+ deadline, target
+ ),
+ last_exc,
+ ) from last_exc
+ else:
+ time_to_deadline = (deadline_dt - now).total_seconds()
+ sleep = min(time_to_deadline, sleep)
+
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
+ )
+ await asyncio.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class AsyncRetry:
+ """Exponential retry decorator for async functions.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+        initial (float): The minimum amount of time to delay in seconds. This
+            must be greater than 0.
+        maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ deadline (float): How long to keep retrying in seconds. The last sleep
+ period is shortened as necessary, so that the last retry runs at
+ ``deadline`` (and not considerably beyond it).
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
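+
+    For example, a sketch combining a custom ``on_error`` hook with the
+    default predicate (``log_retry`` and ``client`` are illustrative):
+
+    .. code-block:: python
+
+        def log_retry(exc):
+            print("retrying after", exc)
+
+        @AsyncRetry(on_error=log_retry)
+        async def call_flaky_rpc():
+            return await client.flaky_rpc()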
+ """
+
+ def __init__(
+ self,
+ predicate=if_transient_error,
+ initial=_DEFAULT_INITIAL_DELAY,
+ maximum=_DEFAULT_MAXIMUM_DELAY,
+ multiplier=_DEFAULT_DELAY_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ on_error=None,
+ ):
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._deadline = deadline
+ self._on_error = on_error
+
+ def __call__(self, func, on_error=None):
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(*args, **kwargs):
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return await retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ self._deadline,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
+
+ def _replace(
+ self,
+ predicate=None,
+ initial=None,
+ maximum=None,
+ multiplier=None,
+ deadline=None,
+ on_error=None,
+ ):
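+        # Note: falsy arguments (e.g. ``0``) fall back to the current values,
+        # so this helper cannot be used to reset a setting to zero or None.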
+ return AsyncRetry(
+ predicate=predicate or self._predicate,
+ initial=initial or self._initial,
+ maximum=maximum or self._maximum,
+ multiplier=multiplier or self._multiplier,
+ deadline=deadline or self._deadline,
+ on_error=on_error or self._on_error,
+ )
+
+ def with_deadline(self, deadline):
+ """Return a copy of this retry with the given deadline.
+
+ Args:
+ deadline (float): How long to keep retrying.
+
+ Returns:
+ AsyncRetry: A new retry instance with the given deadline.
+ """
+ return self._replace(deadline=deadline)
+
+ def with_predicate(self, predicate):
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ AsyncRetry: A new retry instance with the given predicate.
+ """
+ return self._replace(predicate=predicate)
+
+ def with_delay(self, initial=None, maximum=None, multiplier=None):
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+            initial (float): The minimum amount of time to delay. This must
+                be greater than 0.
+            maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Returns:
+ AsyncRetry: A new retry instance with the given predicate.
+            AsyncRetry: A new retry instance with the given delay options.
+ return self._replace(initial=initial, maximum=maximum, multiplier=multiplier)
+
+ def __str__(self):
+ return (
+ "<AsyncRetry predicate={}, initial={:.1f}, maximum={:.1f}, "
+ "multiplier={:.1f}, deadline={:.1f}, on_error={}>".format(
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._deadline,
+ self._on_error,
+ )
+ )
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
new file mode 100644
index 0000000..7323218
--- /dev/null
+++ b/google/api_core/timeout.py
@@ -0,0 +1,220 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Decorators for applying timeout arguments to functions.
+
+These decorators are used to wrap API methods to apply either a constant
+or exponential timeout argument.
+
+For example, imagine an API method that can take a while to return results,
+such as one that might block until a resource is ready:
+
+.. code-block:: python
+
+ def is_thing_ready(timeout=None):
+ response = requests.get('https://example.com/is_thing_ready')
+ response.raise_for_status()
+ return response.json()
+
+This module allows a function like this to be wrapped so that timeouts are
+automatically determined, for example:
+
+.. code-block:: python
+
+ timeout_ = timeout.ExponentialTimeout()
+ is_thing_ready_with_timeout = timeout_(is_thing_ready)
+
+ for n in range(10):
+ try:
+            is_thing_ready_with_timeout()
+        except Exception:
+ pass
+
+In this example the first call to ``is_thing_ready`` will have a relatively
+small timeout (like 1 second). If the resource is available and the request
+completes quickly, the loop exits. But, if the resource isn't yet available
+and the request times out, it'll be retried - this time with a larger timeout.
+
+In the broader context these decorators are typically combined with
+:mod:`google.api_core.retry` to implement API methods with a signature that
+matches ``api_method(request, timeout=None, retry=None)``.
+"""
+
+from __future__ import unicode_literals
+
+import datetime
+import functools
+
+from google.api_core import datetime_helpers
+
+_DEFAULT_INITIAL_TIMEOUT = 5.0 # seconds
+_DEFAULT_MAXIMUM_TIMEOUT = 30.0 # seconds
+_DEFAULT_TIMEOUT_MULTIPLIER = 2.0
+# If specified, must be in seconds. If none, deadline is not used in the
+# timeout calculation.
+_DEFAULT_DEADLINE = None
+
+
+class ConstantTimeout(object):
+ """A decorator that adds a constant timeout argument.
+
+ This is effectively equivalent to
+ ``functools.partial(func, timeout=timeout)``.
+
+ Args:
+        timeout (Optional[float]): the timeout (in seconds) to apply to the
+            wrapped function. If `None`, the target function is expected to
+            never time out.
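+
+    For example, a minimal sketch (reusing the module-level
+    ``is_thing_ready`` example):
+
+    .. code-block:: python
+
+        timeout_ = ConstantTimeout(10.0)
+        is_thing_ready_with_timeout = timeout_(is_thing_ready)
+        # Every call is now made with timeout=10.0.
+        is_thing_ready_with_timeout()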
+ """
+
+ def __init__(self, timeout=None):
+ self._timeout = timeout
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+ kwargs["timeout"] = self._timeout
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return "<ConstantTimeout timeout={:.1f}>".format(self._timeout)
+
+
+def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
+ """A generator that yields exponential timeout values.
+
+ Args:
+ initial (float): The initial timeout.
+ maximum (float): The maximum timeout.
+ multiplier (float): The multiplier applied to the timeout.
+ deadline (float): The overall deadline across all invocations.
+
+ Yields:
+ float: A timeout value.
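+
+    For example, with ``initial=5``, ``maximum=30``, ``multiplier=2`` and a
+    generous deadline, this yields 5, 10, 20, 30, 30, ...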
+ """
+ if deadline is not None:
+ deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+ seconds=deadline
+ )
+ else:
+ deadline_datetime = datetime.datetime.max
+
+ timeout = initial
+ while True:
+ now = datetime_helpers.utcnow()
+ yield min(
+ # The calculated timeout based on invocations.
+ timeout,
+ # The set maximum timeout.
+ maximum,
+ # The remaining time before the deadline is reached.
+ float((deadline_datetime - now).seconds),
+ )
+ timeout = timeout * multiplier
+
+
+class ExponentialTimeout(object):
+ """A decorator that adds an exponentially increasing timeout argument.
+
+ This is useful if a function is called multiple times. Each time the
+ function is called this decorator will calculate a new timeout parameter
+    based on the number of times the function has been called.
+
+    For example:
+
+    .. code-block:: python
+
+        # A sketch: each successive call gets a larger timeout (with the
+        # default settings, 5s then 10s, capped at 30s).
+        wrapped = ExponentialTimeout()(is_thing_ready)
+        wrapped()
+        wrapped()
+
+ Args:
+ initial (float): The initial timeout to pass.
+ maximum (float): The maximum timeout for any one call.
+ multiplier (float): The multiplier applied to the timeout for each
+ invocation.
+ deadline (Optional[float]): The overall deadline across all
+ invocations. This is used to prevent a very large calculated
+ timeout from pushing the overall execution time over the deadline.
+            This is especially useful in conjunction with
+            :mod:`google.api_core.retry`. If ``None``, the timeouts will not
+            be adjusted to accommodate an overall deadline.
+ """
+
+ def __init__(
+ self,
+ initial=_DEFAULT_INITIAL_TIMEOUT,
+ maximum=_DEFAULT_MAXIMUM_TIMEOUT,
+ multiplier=_DEFAULT_TIMEOUT_MULTIPLIER,
+ deadline=_DEFAULT_DEADLINE,
+ ):
+ self._initial = initial
+ self._maximum = maximum
+ self._multiplier = multiplier
+ self._deadline = deadline
+
+ def with_deadline(self, deadline):
+        """Return a copy of this timeout with the given deadline.
+
+ Args:
+ deadline (float): The overall deadline across all invocations.
+
+ Returns:
+ ExponentialTimeout: A new instance with the given deadline.
+ """
+ return ExponentialTimeout(
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ deadline=deadline,
+ )
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+ timeouts = _exponential_timeout_generator(
+ self._initial, self._maximum, self._multiplier, self._deadline
+ )
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+ kwargs["timeout"] = next(timeouts)
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return (
+ "<ExponentialTimeout initial={:.1f}, maximum={:.1f}, "
+ "multiplier={:.1f}, deadline={:.1f}>".format(
+ self._initial, self._maximum, self._multiplier, self._deadline
+ )
+ )
diff --git a/google/api_core/version.py b/google/api_core/version.py
new file mode 100644
index 0000000..999199f
--- /dev/null
+++ b/google/api_core/version.py
@@ -0,0 +1,15 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "2.3.0"
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..5c11157
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,4 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
+ignore_missing_imports = True
diff --git a/noxfile.py b/noxfile.py
new file mode 100644
index 0000000..db37c56
--- /dev/null
+++ b/noxfile.py
@@ -0,0 +1,245 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+import os
+import pathlib
+import shutil
+
+# https://github.com/google/importlab/issues/25
+import nox # pytype: disable=import-error
+
+
+BLACK_VERSION = "black==19.10b0"
+BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
+# Black and flake8 clash on the syntax for ignoring flake8's F401 in this file.
+BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"]
+
+DEFAULT_PYTHON_VERSION = "3.7"
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "unit_grpc_gcp",
+ "unit_wo_grpc",
+ "cover",
+ "pytype",
+ "mypy",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+
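+# Version-string comparison sketch (illustrative):
+#   "3.10" -> [3, 10] >= [3, 6] -> True
+#   "2.7"  -> [2, 7]  >= [3, 6] -> False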
+def _greater_or_equal_than_36(version_string):
+ tokens = version_string.split(".")
+ for i, token in enumerate(tokens):
+ try:
+ tokens[i] = int(token)
+ except ValueError:
+ pass
+ return tokens >= [3, 6]
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+ """Run linters.
+
+ Returns a failure if the linters find linting errors or sufficiently
+ serious code quality issues.
+ """
+ session.install("flake8", "flake8-import-order", BLACK_VERSION)
+ session.install(".")
+ session.run(
+ "black", "--check", *BLACK_EXCLUDES, *BLACK_PATHS,
+ )
+ session.run("flake8", "google", "tests")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def blacken(session):
+ """Run black.
+
+ Format code to uniform standard.
+ """
+ session.install(BLACK_VERSION)
+ session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS)
+
+
+def default(session, install_grpc=True):
+ """Default unit test session.
+
+ This is intended to be run **without** an interpreter set, so
+ that the current ``python`` (on the ``PATH``) or the version of
+    Python corresponding to the ``nox`` binary on the ``PATH`` can
+ run the tests.
+ """
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+
+ # Install all test dependencies, then install this package in-place.
+ session.install("mock", "pytest", "pytest-cov")
+ if install_grpc:
+ session.install("-e", ".[grpc]", "-c", constraints_path)
+ else:
+ session.install("-e", ".", "-c", constraints_path)
+
+ pytest_args = [
+ "python",
+ "-m",
+ "py.test",
+ "--quiet",
+ "--cov=google.api_core",
+ "--cov=tests.unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "unit"),
+ ]
+ pytest_args.extend(session.posargs)
+
+ # Inject AsyncIO content and proto-plus, if version >= 3.6.
+ # proto-plus is needed for a field mask test in test_protobuf_helpers.py
+ if _greater_or_equal_than_36(session.python):
+ session.install("asyncmock", "pytest-asyncio", "proto-plus")
+
+ pytest_args.append("--cov=tests.asyncio")
+ pytest_args.append(os.path.join("tests", "asyncio"))
+ session.run(*pytest_args)
+ else:
+ # Run py.test against the unit tests.
+ session.run(*pytest_args)
+
+
+@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10"])
+def unit(session):
+ """Run the unit test suite."""
+ default(session)
+
+
+@nox.session(python=["3.6", "3.7", "3.8", "3.9"])
+def unit_grpc_gcp(session):
+ """Run the unit test suite with grpcio-gcp installed."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ # Install grpcio-gcp
+ session.install("-e", ".[grpcgcp]", "-c", constraints_path)
+
+ default(session)
+
+
+@nox.session(python=["3.6", "3.10"])
+def unit_wo_grpc(session):
+ """Run the unit test suite w/o grpcio installed"""
+ default(session, install_grpc=False)
+
+
+@nox.session(python="3.6")
+def lint_setup_py(session):
+ """Verify that setup.py is valid (including RST check)."""
+
+ session.install("docutils", "Pygments")
+ session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
+
+
+# No 3.7 because pytype supports up to 3.6 only.
+@nox.session(python="3.6")
+def pytype(session):
+ """Run type-checking."""
+ session.install(".[grpc, grpcgcp]", "pytype >= 2019.3.21")
+ session.run("pytype")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def mypy(session):
+ """Run type-checking."""
+ session.install(".[grpc, grpcgcp]", "mypy")
+ session.install(
+ "types-setuptools", "types-requests", "types-protobuf", "types-mock"
+ )
+ session.run("mypy", "google", "tests")
+
+
+@nox.session(python="3.6")
+def cover(session):
+ """Run the final coverage report.
+
+ This outputs the coverage report aggregating coverage from the unit
+ test runs (not system test runs), and then erases coverage data.
+ """
+ session.install("coverage", "pytest-cov")
+ session.run("coverage", "report", "--show-missing", "--fail-under=100")
+ session.run("coverage", "erase")
+
+
+@nox.session(python="3.8")
+def docs(session):
+ """Build the docs for this library."""
+
+ session.install("-e", ".[grpc, grpcgcp]")
+ session.install("sphinx==4.0.1", "alabaster", "recommonmark")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-W", # warnings as errors
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
+
+
+@nox.session(python="3.8")
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ session.install(
+ "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+ )
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
diff --git a/owlbot.py b/owlbot.py
new file mode 100644
index 0000000..451f7c4
--- /dev/null
+++ b/owlbot.py
@@ -0,0 +1,47 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script is used to synthesize generated parts of this library."""
+
+import synthtool as s
+from synthtool import gcp
+
+common = gcp.CommonTemplates()
+
+# ----------------------------------------------------------------------------
+# Add templated files
+# ----------------------------------------------------------------------------
+excludes = [
+ "noxfile.py", # pytype
+ "setup.cfg", # pytype
+ ".flake8", # flake8-import-order, layout
+ ".coveragerc", # layout
+ "CONTRIBUTING.rst", # no systests
+]
+templated_files = common.py_library(microgenerator=True, cov_level=100)
+s.move(templated_files, excludes=excludes)
+
+# Add pytype support
+s.replace(
+ ".gitignore",
+ """\
+.pytest_cache
+""",
+ """\
+.pytest_cache
+.pytype
+""",
+)
+
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 0000000..c21036d
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,12 @@
+{
+ "extends": [
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"],
+ "pip_requirements": {
+ "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+ }
+}
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 0000000..21f6d2a
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd "$ROOT"
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 0000000..d309d6e
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+ trim_blocks=True,
+ loader=jinja2.FileSystemLoader(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+ return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('source')
+ parser.add_argument('--destination', default='README.rst')
+
+ args = parser.parse_args()
+
+ source = os.path.abspath(args.source)
+ root = os.path.dirname(source)
+ destination = os.path.join(root, args.destination)
+
+ jinja_env.globals['get_help'] = get_help
+
+ with io.open(source, 'r') as f:
+        config = yaml.safe_load(f)
+
+ # This allows get_help to execute in the right directory.
+ os.chdir(root)
+
+ output = README_TMPL.render(config)
+
+ with io.open(destination, 'w') as f:
+ f.write(output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 0000000..4fd2397
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 0000000..1446b94
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 0000000..11957ce
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 0000000..275d649
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 0000000..5ea33d1
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..0be0b3f
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,12 @@
+[bdist_wheel]
+universal = 1
+
+[pytype]
+python_version = 3.6
+inputs =
+ google/
+exclude =
+ tests/
+output = .pytype/
+# Workaround for https://github.com/google/pytype/issues/150
+disable = pyi-error
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..ddc5600
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,102 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import os
+
+import setuptools
+
+
+# Package metadata.
+
+name = "google-api-core"
+description = "Google API client core library"
+
+# Should be one of:
+# 'Development Status :: 3 - Alpha'
+# 'Development Status :: 4 - Beta'
+# 'Development Status :: 5 - Production/Stable'
+release_status = "Development Status :: 5 - Production/Stable"
+dependencies = [
+ "googleapis-common-protos >= 1.52.0, < 2.0dev",
+ "protobuf >= 3.12.0",
+ "google-auth >= 1.25.0, < 3.0dev",
+ "requests >= 2.18.0, < 3.0.0dev",
+ "setuptools >= 40.3.0",
+]
+extras = {
+ "grpc": ["grpcio >= 1.33.2, < 2.0dev", "grpcio-status >= 1.33.2, < 2.0dev"],
+ "grpcgcp": "grpcio-gcp >= 0.2.2",
+ "grpcio-gcp": "grpcio-gcp >= 0.2.2",
+}
+
+
+# Setup boilerplate below this line.
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+
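+# Execute version.py in an isolated namespace to read __version__ without
+# importing the (not yet installed) package.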
+version = {}
+with open(os.path.join(package_root, "google/api_core/version.py")) as fp:
+ exec(fp.read(), version)
+version = version["__version__"]
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+ readme = readme_file.read()
+
+# Only include packages under the 'google' namespace. Do not include tests,
+# benchmarks, etc.
+packages = [
+ package for package in setuptools.find_packages() if package.startswith("google")
+]
+
+# Determine which namespaces are needed.
+namespaces = ["google"]
+if "google.cloud" in packages:
+ namespaces.append("google.cloud")
+
+
+setuptools.setup(
+ name=name,
+ version=version,
+ description=description,
+ long_description=readme,
+ author="Google LLC",
+ author_email="googleapis-packages@google.com",
+ license="Apache 2.0",
+ url="https://github.com/googleapis/python-api-core",
+ classifiers=[
+ release_status,
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Operating System :: OS Independent",
+ "Topic :: Internet",
+ ],
+ platforms="Posix; MacOS X; Windows",
+ packages=packages,
+ namespace_packages=namespaces,
+ install_requires=dependencies,
+ extras_require=extras,
+ python_requires=">=3.6",
+ include_package_data=True,
+ zip_safe=False,
+)
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 0000000..b05fbd6
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json \ No newline at end of file
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.10.txt
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.11.txt
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
new file mode 100644
index 0000000..0c2a07b
--- /dev/null
+++ b/testing/constraints-3.6.txt
@@ -0,0 +1,17 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have foo==1.14.0
+googleapis-common-protos==1.52.0
+protobuf==3.12.0
+google-auth==1.25.0
+requests==2.18.0
+setuptools==40.3.0
+packaging==14.3
+grpcio==1.33.2
+grpcio-gcp==0.2.2
+grpcio-status==1.33.2
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.7.txt
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.8.txt
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.9.txt
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/__init__.py
diff --git a/tests/asyncio/__init__.py b/tests/asyncio/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/asyncio/__init__.py
diff --git a/tests/asyncio/future/__init__.py b/tests/asyncio/future/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/asyncio/future/__init__.py
diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py
new file mode 100644
index 0000000..1e9ae33
--- /dev/null
+++ b/tests/asyncio/future/test_async_future.py
@@ -0,0 +1,228 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+
+import mock
+import pytest
+
+from google.api_core import exceptions
+from google.api_core.future import async_future
+
+
+class AsyncFuture(async_future.AsyncFuture):
+ async def done(self):
+ return False
+
+ async def cancel(self):
+ return True
+
+ async def cancelled(self):
+ return False
+
+ async def running(self):
+ return True
+
+
+@pytest.mark.asyncio
+async def test_polling_future_constructor():
+ future = AsyncFuture()
+ assert not await future.done()
+ assert not await future.cancelled()
+ assert await future.running()
+ assert await future.cancel()
+
+
+@pytest.mark.asyncio
+async def test_set_result():
+ future = AsyncFuture()
+
+ future.set_result(1)
+
+ assert await future.result() == 1
+ callback_called = asyncio.Event()
+
+ def callback(unused_future):
+ callback_called.set()
+
+ future.add_done_callback(callback)
+ await callback_called.wait()
+
+
+@pytest.mark.asyncio
+async def test_set_exception():
+ future = AsyncFuture()
+ exception = ValueError("meep")
+
+ future.set_exception(exception)
+
+ assert await future.exception() == exception
+ with pytest.raises(ValueError):
+ await future.result()
+
+ callback_called = asyncio.Event()
+
+ def callback(unused_future):
+ callback_called.set()
+
+ future.add_done_callback(callback)
+ await callback_called.wait()
+
+
+@pytest.mark.asyncio
+async def test_invoke_callback_exception():
+ future = AsyncFuture()
+ future.set_result(42)
+
+ # This should not raise, despite the callback causing an exception.
+ callback_called = asyncio.Event()
+
+ def callback(unused_future):
+ callback_called.set()
+ raise ValueError()
+
+ future.add_done_callback(callback)
+ await callback_called.wait()
+
+
+class AsyncFutureWithPoll(AsyncFuture):
+ def __init__(self):
+ super().__init__()
+ self.poll_count = 0
+ self.event = asyncio.Event()
+
+ async def done(self):
+ self.poll_count += 1
+ await self.event.wait()
+ self.set_result(42)
+ return True
+
+
+@pytest.mark.asyncio
+async def test_result_with_polling():
+ future = AsyncFutureWithPoll()
+
+ future.event.set()
+ result = await future.result()
+
+ assert result == 42
+ assert future.poll_count == 1
+ # Repeated calls should not cause additional polling
+ assert await future.result() == result
+ assert future.poll_count == 1
+
+
+class AsyncFutureTimeout(AsyncFutureWithPoll):
+ async def done(self):
+ await asyncio.sleep(0.2)
+ return False
+
+
+@pytest.mark.asyncio
+async def test_result_timeout():
+ future = AsyncFutureTimeout()
+ with pytest.raises(asyncio.TimeoutError):
+ await future.result(timeout=0.2)
+
+
+@pytest.mark.asyncio
+async def test_exception_timeout():
+ future = AsyncFutureTimeout()
+ with pytest.raises(asyncio.TimeoutError):
+ await future.exception(timeout=0.2)
+
+
+@pytest.mark.asyncio
+async def test_result_timeout_with_retry():
+ future = AsyncFutureTimeout()
+ with pytest.raises(asyncio.TimeoutError):
+ await future.exception(timeout=0.4)
+
+
+class AsyncFutureTransient(AsyncFutureWithPoll):
+ def __init__(self, errors):
+ super().__init__()
+ self._errors = errors
+
+ async def done(self):
+ if self._errors:
+ error, self._errors = self._errors[0], self._errors[1:]
+ raise error("testing")
+ self.poll_count += 1
+ self.set_result(42)
+ return True
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@pytest.mark.asyncio
+async def test_result_transient_error(unused_sleep):
+ future = AsyncFutureTransient(
+ (
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+ )
+ )
+ result = await future.result()
+ assert result == 42
+ assert future.poll_count == 1
+ # Repeated calls should not cause additional polling
+ assert await future.result() == result
+ assert future.poll_count == 1
+
+
+@pytest.mark.asyncio
+async def test_callback_concurrency():
+ future = AsyncFutureWithPoll()
+
+ callback_called = asyncio.Event()
+
+ def callback(unused_future):
+ callback_called.set()
+
+ future.add_done_callback(callback)
+
+    # Give the polling task a second to poll
+ await asyncio.sleep(1)
+ assert future.poll_count == 1
+
+ future.event.set()
+ await callback_called.wait()
+
+
+@pytest.mark.asyncio
+async def test_double_callback_concurrency():
+ future = AsyncFutureWithPoll()
+
+ callback_called = asyncio.Event()
+
+ def callback(unused_future):
+ callback_called.set()
+
+ callback_called2 = asyncio.Event()
+
+ def callback2(unused_future):
+ callback_called2.set()
+
+ future.add_done_callback(callback)
+ future.add_done_callback(callback2)
+
+    # Give the polling task a second to poll
+ await asyncio.sleep(1)
+ future.event.set()
+
+ assert future.poll_count == 1
+ await callback_called.wait()
+ await callback_called2.wait()
diff --git a/tests/asyncio/gapic/test_config_async.py b/tests/asyncio/gapic/test_config_async.py
new file mode 100644
index 0000000..dbb05d5
--- /dev/null
+++ b/tests/asyncio/gapic/test_config_async.py
@@ -0,0 +1,95 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core.gapic_v1 import config_async
+
+
+INTERFACE_CONFIG = {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "other": ["FAILED_PRECONDITION"],
+ "non_idempotent": [],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000,
+ },
+ "other": {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 1,
+ "max_retry_delay_millis": 1000,
+ "initial_rpc_timeout_millis": 1000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 1000,
+ "total_timeout_millis": 1000,
+ },
+ },
+ "methods": {
+ "AnnotateVideo": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "Other": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "other",
+ "retry_params_name": "other",
+ },
+ "Plain": {"timeout_millis": 30000},
+ },
+}
+
+
+def test_create_method_configs():
+ method_configs = config_async.parse_method_configs(INTERFACE_CONFIG)
+
+ retry, timeout = method_configs["AnnotateVideo"]
+ assert retry._predicate(exceptions.DeadlineExceeded(None))
+ assert retry._predicate(exceptions.ServiceUnavailable(None))
+ assert retry._initial == 1.0
+ assert retry._multiplier == 2.5
+ assert retry._maximum == 120.0
+ assert retry._deadline == 600.0
+ assert timeout._initial == 120.0
+ assert timeout._multiplier == 1.0
+ assert timeout._maximum == 120.0
+
+ retry, timeout = method_configs["Other"]
+ assert retry._predicate(exceptions.FailedPrecondition(None))
+ assert retry._initial == 1.0
+ assert retry._multiplier == 1.0
+ assert retry._maximum == 1.0
+ assert retry._deadline == 1.0
+ assert timeout._initial == 1.0
+ assert timeout._multiplier == 1.0
+ assert timeout._maximum == 1.0
+
+ retry, timeout = method_configs["Plain"]
+ assert retry is None
+ assert timeout._timeout == 30.0
diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py
new file mode 100644
index 0000000..1410747
--- /dev/null
+++ b/tests/asyncio/gapic/test_method_async.py
@@ -0,0 +1,248 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+import mock
+import pytest
+
+try:
+ from grpc import aio
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import retry_async
+from google.api_core import timeout
+
+
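+# Each call to the patched ``utcnow`` below consumes one value from this
+# generator, advancing the fake clock by 0.5s and making deadline math
+# deterministic.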
+def _utcnow_monotonic():
+ current_time = datetime.datetime.min
+ delta = datetime.timedelta(seconds=0.5)
+ while True:
+ yield current_time
+ current_time += delta
+
+
+@pytest.mark.asyncio
+async def test_wrap_method_basic():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method)
+
+ result = await wrapped_method(1, 2, meep="moop")
+
+ assert result == 42
+ method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
+
+ # Check that the default client info was specified in the metadata.
+ metadata = method.call_args[1]["metadata"]
+ assert len(metadata) == 1
+ client_info = gapic_v1.client_info.DEFAULT_CLIENT_INFO
+ user_agent_metadata = client_info.to_grpc_metadata()
+ assert user_agent_metadata in metadata
+
+
+@pytest.mark.asyncio
+async def test_wrap_method_with_no_client_info():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method, client_info=None)
+
+ await wrapped_method(1, 2, meep="moop")
+
+ method.assert_called_once_with(1, 2, meep="moop")
+
+
+@pytest.mark.asyncio
+async def test_wrap_method_with_custom_client_info():
+ client_info = gapic_v1.client_info.ClientInfo(
+ python_version=1,
+ grpc_version=2,
+ api_core_version=3,
+ gapic_version=4,
+ client_library_version=5,
+ )
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method, client_info=client_info)
+
+ await wrapped_method(1, 2, meep="moop")
+
+ method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
+
+ # Check that the custom client info was specified in the metadata.
+ metadata = method.call_args[1]["metadata"]
+ assert client_info.to_grpc_metadata() in metadata
+
+
+@pytest.mark.asyncio
+async def test_invoke_wrapped_method_with_metadata():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method)
+
+ await wrapped_method(mock.sentinel.request, metadata=[("a", "b")])
+
+ method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
+ metadata = method.call_args[1]["metadata"]
+ # Metadata should have two items: the client info metadata and our custom
+ # metadata.
+ assert len(metadata) == 2
+ assert ("a", "b") in metadata
+
+
+@pytest.mark.asyncio
+async def test_invoke_wrapped_method_with_metadata_as_none():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method)
+
+ await wrapped_method(mock.sentinel.request, metadata=None)
+
+ method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
+ metadata = method.call_args[1]["metadata"]
+ # Metadata should have just one item: the client info metadata.
+ assert len(metadata) == 1
+
+
+@mock.patch("asyncio.sleep")
+@pytest.mark.asyncio
+async def test_wrap_method_with_default_retry_and_timeout(unused_sleep):
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=[exceptions.InternalServerError(None), fake_call],
+ )
+
+ default_retry = retry_async.AsyncRetry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = await wrapped_method()
+
+ assert result == 42
+ assert method.call_count == 2
+ method.assert_called_with(timeout=60, metadata=mock.ANY)
+
+
+@mock.patch("asyncio.sleep")
+@pytest.mark.asyncio
+async def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=[exceptions.InternalServerError(None), fake_call],
+ )
+
+ default_retry = retry_async.AsyncRetry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = await wrapped_method(
+ retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT,
+ )
+
+ assert result == 42
+ assert method.call_count == 2
+ method.assert_called_with(timeout=60, metadata=mock.ANY)
+
+
+@mock.patch("asyncio.sleep")
+@pytest.mark.asyncio
+async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep):
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=[exceptions.NotFound(None), fake_call],
+ )
+
+ default_retry = retry_async.AsyncRetry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = await wrapped_method(
+ retry=retry_async.AsyncRetry(
+ retry_async.if_exception_type(exceptions.NotFound)
+ ),
+ timeout=timeout.ConstantTimeout(22),
+ )
+
+ assert result == 42
+ assert method.call_count == 2
+ method.assert_called_with(timeout=22, metadata=mock.ANY)
+
+
+@mock.patch("asyncio.sleep")
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ side_effect=_utcnow_monotonic(),
+ autospec=True,
+)
+@pytest.mark.asyncio
+async def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
+ method = mock.Mock(
+ spec=aio.UnaryUnaryMultiCallable,
+ side_effect=([exceptions.InternalServerError(None)] * 4) + [fake_call],
+ )
+
+ default_retry = retry_async.AsyncRetry()
+ default_timeout = timeout.ExponentialTimeout(deadline=60)
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ # Overriding only the retry's deadline should also override the timeout's
+ # deadline.
+ result = await wrapped_method(retry=default_retry.with_deadline(30))
+
+ assert result == 42
+ timeout_args = [call[1]["timeout"] for call in method.call_args_list]
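+ # The per-attempt timeouts grow exponentially from 5 seconds but are
+ # capped by the configured maximum and by the time remaining before the
+ # overridden 30 second deadline, so the sequence flattens out.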
+ assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
+ assert utcnow.call_count == (
+ 1
+ + 1 # Compute wait_for timeout in retry_async
+ + 5 # First to set the deadline.
+ + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+ )
+
+
+@pytest.mark.asyncio
+async def test_wrap_method_with_overriding_timeout_as_a_number():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+ default_retry = retry_async.AsyncRetry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = await wrapped_method(timeout=22)
+
+ assert result == 42
+ method.assert_called_once_with(timeout=22, metadata=mock.ANY)
diff --git a/tests/asyncio/operations_v1/__init__.py b/tests/asyncio/operations_v1/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/asyncio/operations_v1/__init__.py
diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py
new file mode 100644
index 0000000..47c3b4b
--- /dev/null
+++ b/tests/asyncio/operations_v1/test_operations_async_client.py
@@ -0,0 +1,115 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import pytest
+
+try:
+ from grpc import aio
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
+from google.api_core import page_iterator_async
+from google.longrunning import operations_pb2
+from google.protobuf import empty_pb2
+
+
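+# Test helper: builds a mocked aio channel whose unary_unary multicallable
+# returns a FakeUnaryUnaryCall that resolves to the given response.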
+def _mock_grpc_objects(response):
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall(response)
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+ mocked_channel = mock.Mock()
+ mocked_channel.unary_unary = mock.Mock(return_value=method)
+ return mocked_channel, method, fake_call
+
+
+@pytest.mark.asyncio
+async def test_get_operation():
+ mocked_channel, method, fake_call = _mock_grpc_objects(
+ operations_pb2.Operation(name="meep")
+ )
+ client = operations_v1.OperationsAsyncClient(mocked_channel)
+
+ response = await client.get_operation("name", metadata=[("header", "foo")])
+ assert method.call_count == 1
+ assert tuple(method.call_args_list[0])[0][0].name == "name"
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
+ assert response == fake_call.response
+
+
+@pytest.mark.asyncio
+async def test_list_operations():
+ operations = [
+ operations_pb2.Operation(name="1"),
+ operations_pb2.Operation(name="2"),
+ ]
+ list_response = operations_pb2.ListOperationsResponse(operations=operations)
+
+ mocked_channel, method, fake_call = _mock_grpc_objects(list_response)
+ client = operations_v1.OperationsAsyncClient(mocked_channel)
+
+ pager = await client.list_operations("name", "filter", metadata=[("header", "foo")])
+
+ assert isinstance(pager, page_iterator_async.AsyncIterator)
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ assert responses == operations
+
+ assert method.call_count == 1
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
+ request = tuple(method.call_args_list[0])[0][0]
+ assert isinstance(request, operations_pb2.ListOperationsRequest)
+ assert request.name == "name"
+ assert request.filter == "filter"
+
+
+@pytest.mark.asyncio
+async def test_delete_operation():
+ mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
+ client = operations_v1.OperationsAsyncClient(mocked_channel)
+
+ await client.delete_operation("name", metadata=[("header", "foo")])
+
+ assert method.call_count == 1
+ assert tuple(method.call_args_list[0])[0][0].name == "name"
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation():
+ mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
+ client = operations_v1.OperationsAsyncClient(mocked_channel)
+
+ await client.cancel_operation("name", metadata=[("header", "foo")])
+
+ assert method.call_count == 1
+ assert tuple(method.call_args_list[0])[0][0].name == "name"
+ assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
+ "metadata"
+ ]
diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py
new file mode 100644
index 0000000..3681a40
--- /dev/null
+++ b/tests/asyncio/test_grpc_helpers_async.py
@@ -0,0 +1,594 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import pytest # noqa: I202
+
+try:
+ import grpc
+ from grpc import aio
+except ImportError:
+ grpc = aio = None
+
+
+if grpc is None:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+
+from google.api_core import exceptions
+from google.api_core import grpc_helpers_async
+import google.auth.credentials
+
+
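+# Minimal grpc.RpcError that also implements the grpc.Call interface; only
+# code() matters here, as it drives the mapping to api_core exceptions.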
+class RpcErrorImpl(grpc.RpcError, grpc.Call):
+ def __init__(self, code):
+ super(RpcErrorImpl, self).__init__()
+ self._code = code
+
+ def code(self):
+ return self._code
+
+ def details(self):
+ return None
+
+ def trailing_metadata(self):
+ return None
+
+
+@pytest.mark.asyncio
+async def test_wrap_unary_errors():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+ callable_ = mock.AsyncMock(spec=["__call__"], side_effect=grpc_error)
+
+ wrapped_callable = grpc_helpers_async._wrap_unary_errors(callable_)
+
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ await wrapped_callable(1, 2, three="four")
+
+ callable_.assert_called_once_with(1, 2, three="four")
+ assert exc_info.value.response == grpc_error
+
+
+@pytest.mark.asyncio
+async def test_common_methods_in_wrapped_call():
+ mock_call = mock.Mock(aio.UnaryUnaryCall, autospec=True)
+ wrapped_call = grpc_helpers_async._WrappedUnaryUnaryCall().with_call(mock_call)
+
+ await wrapped_call.initial_metadata()
+ assert mock_call.initial_metadata.call_count == 1
+
+ await wrapped_call.trailing_metadata()
+ assert mock_call.trailing_metadata.call_count == 1
+
+ await wrapped_call.code()
+ assert mock_call.code.call_count == 1
+
+ await wrapped_call.details()
+ assert mock_call.details.call_count == 1
+
+ wrapped_call.cancelled()
+ assert mock_call.cancelled.call_count == 1
+
+ wrapped_call.done()
+ assert mock_call.done.call_count == 1
+
+ wrapped_call.time_remaining()
+ assert mock_call.time_remaining.call_count == 1
+
+ wrapped_call.cancel()
+ assert mock_call.cancel.call_count == 1
+
+ callback = mock.sentinel.callback
+ wrapped_call.add_done_callback(callback)
+ mock_call.add_done_callback.assert_called_once_with(callback)
+
+ await wrapped_call.wait_for_connection()
+ assert mock_call.wait_for_connection.call_count == 1
+
+
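+# The _wrap_stream_errors tests below verify that the wrapper eagerly waits
+# for the call's connection and re-raises gRPC errors as api_core
+# exceptions for each streaming call variant.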
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_unary_stream():
+ mock_call = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ await wrapped_callable(1, 2, three="four")
+ multicallable.assert_called_once_with(1, 2, three="four")
+ assert mock_call.wait_for_connection.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_stream_unary():
+ mock_call = mock.Mock(aio.StreamUnaryCall, autospec=True)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ await wrapped_callable(1, 2, three="four")
+ multicallable.assert_called_once_with(1, 2, three="four")
+ assert mock_call.wait_for_connection.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_stream_stream():
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ await wrapped_callable(1, 2, three="four")
+ multicallable.assert_called_once_with(1, 2, three="four")
+ assert mock_call.wait_for_connection.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_type_error():
+ mock_call = mock.Mock()
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ with pytest.raises(TypeError):
+ await wrapped_callable()
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_raised():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ mock_call.wait_for_connection = mock.AsyncMock(side_effect=[grpc_error])
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ with pytest.raises(exceptions.InvalidArgument):
+ await wrapped_callable()
+ assert mock_call.wait_for_connection.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_read():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ mock_call.read = mock.AsyncMock(side_effect=grpc_error)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ wrapped_call = await wrapped_callable(1, 2, three="four")
+ multicallable.assert_called_once_with(1, 2, three="four")
+ assert mock_call.wait_for_connection.call_count == 1
+
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ await wrapped_call.read()
+ assert exc_info.value.response == grpc_error
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_aiter():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ mocked_aiter = mock.Mock(spec=["__anext__"])
+ mocked_aiter.__anext__ = mock.AsyncMock(
+ side_effect=[mock.sentinel.response, grpc_error]
+ )
+ mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_call = await wrapped_callable()
+
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ async for response in wrapped_call:
+ assert response == mock.sentinel.response
+ assert exc_info.value.response == grpc_error
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_aiter_non_rpc_error():
+ non_grpc_error = TypeError("Not a gRPC error")
+
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ mocked_aiter = mock.Mock(spec=["__anext__"])
+ mocked_aiter.__anext__ = mock.AsyncMock(
+ side_effect=[mock.sentinel.response, non_grpc_error]
+ )
+ mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_call = await wrapped_callable()
+
+ with pytest.raises(TypeError) as exc_info:
+ async for response in wrapped_call:
+ assert response == mock.sentinel.response
+ assert exc_info.value == non_grpc_error
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_aiter_called_multiple_times():
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_call = await wrapped_callable()
+
+ assert wrapped_call.__aiter__() == wrapped_call.__aiter__()
+
+
+@pytest.mark.asyncio
+async def test_wrap_stream_errors_write():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+
+ mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
+ mock_call.write = mock.AsyncMock(side_effect=[None, grpc_error])
+ mock_call.done_writing = mock.AsyncMock(side_effect=[None, grpc_error])
+ multicallable = mock.Mock(return_value=mock_call)
+
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+
+ wrapped_call = await wrapped_callable()
+
+ await wrapped_call.write(mock.sentinel.request)
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ await wrapped_call.write(mock.sentinel.request)
+ assert mock_call.write.call_count == 2
+ assert exc_info.value.response == grpc_error
+
+ await wrapped_call.done_writing()
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ await wrapped_call.done_writing()
+ assert mock_call.done_writing.call_count == 2
+ assert exc_info.value.response == grpc_error
+
+
+@mock.patch("google.api_core.grpc_helpers_async._wrap_unary_errors")
+def test_wrap_errors_non_streaming(wrap_unary_errors):
+ callable_ = mock.create_autospec(aio.UnaryUnaryMultiCallable)
+
+ result = grpc_helpers_async.wrap_errors(callable_)
+
+ assert result == wrap_unary_errors.return_value
+ wrap_unary_errors.assert_called_once_with(callable_)
+
+
+@mock.patch("google.api_core.grpc_helpers_async._wrap_stream_errors")
+def test_wrap_errors_streaming(wrap_stream_errors):
+ callable_ = mock.create_autospec(aio.UnaryStreamMultiCallable)
+
+ result = grpc_helpers_async.wrap_errors(callable_)
+
+ assert result == wrap_stream_errors.return_value
+ wrap_stream_errors.assert_called_once_with(callable_)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(target)
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
+@mock.patch(
+ "google.auth.transport.requests.Request",
+ autospec=True,
+ return_value=mock.sentinel.Request,
+)
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit_with_default_host(
+ grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin
+):
+ target = "example.com:443"
+ default_host = "example.com"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(target, default_host=default_host)
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
+ auth_metadata_plugin.assert_called_once_with(
+ mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
+ )
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit_with_ssl_creds(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+
+ ssl_creds = grpc.ssl_channel_credentials()
+
+ grpc_helpers_async.create_channel(target, ssl_credentials=ssl_creds)
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
+ composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
+ composite_creds = composite_creds_call.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit_with_scopes(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(target, scopes=["one", "two"])
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_implicit_with_default_scopes(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, default_scopes=["three", "four"]
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+def test_create_channel_explicit_with_duplicate_credentials():
+ target = "example:443"
+
+ with pytest.raises(exceptions.DuplicateCredentialArgs) as excinfo:
+ grpc_helpers_async.create_channel(
+ target,
+ credentials_file="credentials.json",
+ credentials=mock.sentinel.credentials,
+ )
+
+ assert "mutually exclusive" in str(excinfo.value)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials=mock.sentinel.credentials
+ )
+
+ auth_creds.assert_called_once_with(
+ mock.sentinel.credentials, scopes=None, default_scopes=None
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
+ target = "example.com:443"
+ scopes = ["1", "2"]
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials=credentials, scopes=scopes
+ )
+
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_explicit_default_scopes(
+ grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials=credentials, default_scopes=default_scopes
+ )
+
+ credentials.with_scopes.assert_called_once_with(
+ scopes=None, default_scopes=default_scopes
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_explicit_with_quota_project(
+ grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(
+ google.auth.credentials.CredentialsWithQuotaProject, instance=True
+ )
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials=credentials, quota_project_id="project-foo"
+ )
+
+ credentials.with_quota_project.assert_called_once_with("project-foo")
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials_file=credentials_file
+ )
+
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=None
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file_and_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ scopes = ["1", "2"]
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials_file=credentials_file, scopes=scopes
+ )
+
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=scopes, default_scopes=None
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.aio.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file_and_default_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers_async.create_channel(
+ target, credentials_file=credentials_file, default_scopes=default_scopes
+ )
+
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=default_scopes
+ )
+ assert channel is grpc_secure_channel.return_value
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@pytest.mark.skipif(
+ grpc_helpers_async.HAS_GRPC_GCP, reason="grpc_gcp module not available"
+)
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel_without_grpc_gcp(grpc_secure_channel):
+ target = "example.com:443"
+ scopes = ["test_scope"]
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ grpc_helpers_async.create_channel(target, credentials=credentials, scopes=scopes)
+ grpc_secure_channel.assert_called()
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
+
+
+@pytest.mark.asyncio
+async def test_fake_stream_unary_call():
+ fake_call = grpc_helpers_async.FakeStreamUnaryCall()
+ await fake_call.wait_for_connection()
+ response = await fake_call
+ assert fake_call.response == response
diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py
new file mode 100644
index 0000000..26ad7ce
--- /dev/null
+++ b/tests/asyncio/test_operation_async.py
@@ -0,0 +1,203 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core import operation_async
+from google.api_core import operations_v1
+from google.api_core import retry_async
+from google.longrunning import operations_pb2
+from google.protobuf import struct_pb2
+from google.rpc import code_pb2
+from google.rpc import status_pb2
+
+TEST_OPERATION_NAME = "test/operation"
+
+
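+# Test helper: builds an Operation proto, packing metadata and response
+# into the proto's Any fields and copying an error status when given.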
+def make_operation_proto(
+ name=TEST_OPERATION_NAME, metadata=None, response=None, error=None, **kwargs
+):
+ operation_proto = operations_pb2.Operation(name=name, **kwargs)
+
+ if metadata is not None:
+ operation_proto.metadata.Pack(metadata)
+
+ if response is not None:
+ operation_proto.response.Pack(response)
+
+ if error is not None:
+ operation_proto.error.CopyFrom(error)
+
+ return operation_proto
+
+
+def make_operation_future(client_operations_responses=None):
+ if client_operations_responses is None:
+ client_operations_responses = [make_operation_proto()]
+
+ refresh = mock.AsyncMock(spec=["__call__"], side_effect=client_operations_responses)
+ refresh.responses = client_operations_responses
+ cancel = mock.AsyncMock(spec=["__call__"])
+ operation_future = operation_async.AsyncOperation(
+ client_operations_responses[0],
+ refresh,
+ cancel,
+ result_type=struct_pb2.Struct,
+ metadata_type=struct_pb2.Struct,
+ )
+
+ return operation_future, refresh, cancel
+
+
+@pytest.mark.asyncio
+async def test_constructor():
+ future, refresh, _ = make_operation_future()
+
+ assert future.operation == refresh.responses[0]
+ assert future.operation.done is False
+ assert future.operation.name == TEST_OPERATION_NAME
+ assert future.metadata is None
+ assert await future.running()
+
+
+def test_metadata():
+ expected_metadata = struct_pb2.Struct()
+ future, _, _ = make_operation_future(
+ [make_operation_proto(metadata=expected_metadata)]
+ )
+
+ assert future.metadata == expected_metadata
+
+
+@pytest.mark.asyncio
+async def test_cancellation():
+ responses = [
+ make_operation_proto(),
+ # Second response indicates that the operation was cancelled.
+ make_operation_proto(
+ done=True, error=status_pb2.Status(code=code_pb2.CANCELLED)
+ ),
+ ]
+ future, _, cancel = make_operation_future(responses)
+
+ assert await future.cancel()
+ assert await future.cancelled()
+ cancel.assert_called_once_with()
+
+ # Cancelling twice should have no effect.
+ assert not await future.cancel()
+ cancel.assert_called_once_with()
+
+
+@pytest.mark.asyncio
+async def test_result():
+ expected_result = struct_pb2.Struct()
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the result.
+ make_operation_proto(done=True, response=expected_result),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ result = await future.result()
+
+ assert result == expected_result
+ assert await future.done()
+
+
+@pytest.mark.asyncio
+async def test_done_w_retry():
+ RETRY_PREDICATE = retry_async.if_exception_type(exceptions.TooManyRequests)
+ test_retry = retry_async.AsyncRetry(predicate=RETRY_PREDICATE)
+
+ expected_result = struct_pb2.Struct()
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the result.
+ make_operation_proto(done=True, response=expected_result),
+ ]
+ future, refresh, _ = make_operation_future(responses)
+
+ await future.done(retry=test_retry)
+ refresh.assert_called_once_with(retry=test_retry)
+
+
+@pytest.mark.asyncio
+async def test_exception():
+ expected_exception = status_pb2.Status(message="meep")
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the error.
+ make_operation_proto(done=True, error=expected_exception),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ exception = await future.exception()
+
+ assert expected_exception.message in "{!r}".format(exception)
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@pytest.mark.asyncio
+async def test_unexpected_result(unused_sleep):
+ responses = [
+ make_operation_proto(),
+ # Second operation response is done, but has no error or response.
+ make_operation_proto(done=True),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ exception = await future.exception()
+
+ assert "Unexpected state" in "{!r}".format(exception)
+
+
+def test_from_gapic():
+ operation_proto = make_operation_proto(done=True)
+ operations_client = mock.create_autospec(
+ operations_v1.OperationsClient, instance=True
+ )
+
+ future = operation_async.from_gapic(
+ operation_proto,
+ operations_client,
+ struct_pb2.Struct,
+ metadata_type=struct_pb2.Struct,
+ grpc_metadata=[("x-goog-request-params", "foo")],
+ )
+
+ assert future._result_type == struct_pb2.Struct
+ assert future._metadata_type == struct_pb2.Struct
+ assert future.operation.name == TEST_OPERATION_NAME
+ assert future.done
+ assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
+ assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
+
+
+def test_deserialize():
+ op = make_operation_proto(name="foobarbaz")
+ serialized = op.SerializeToString()
+ deserialized_op = operation_async.AsyncOperation.deserialize(serialized)
+ assert op.name == deserialized_op.name
+ assert type(op) is type(deserialized_op)
diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py
new file mode 100644
index 0000000..75f9e1c
--- /dev/null
+++ b/tests/asyncio/test_page_iterator_async.py
@@ -0,0 +1,295 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+
+import mock
+import pytest
+
+from google.api_core import page_iterator_async
+
+
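+# Minimal concrete AsyncIterator whose _next_page returns an autospec'd
+# Page; individual tests replace _next_page to script page sequences.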
+class PageAsyncIteratorImpl(page_iterator_async.AsyncIterator):
+ async def _next_page(self):
+ return mock.create_autospec(page_iterator_async.Page, instance=True)
+
+
+class TestAsyncIterator:
+ def test_constructor(self):
+ client = mock.sentinel.client
+ item_to_value = mock.sentinel.item_to_value
+ token = "ab13nceor03"
+ max_results = 1337
+
+ iterator = PageAsyncIteratorImpl(
+ client, item_to_value, page_token=token, max_results=max_results
+ )
+
+ assert not iterator._started
+ assert iterator.client is client
+ assert iterator.item_to_value == item_to_value
+ assert iterator.max_results == max_results
+ # Changing attributes.
+ assert iterator.page_number == 0
+ assert iterator.next_page_token == token
+ assert iterator.num_results == 0
+
+ @pytest.mark.asyncio
+ async def test_anext(self):
+ parent = mock.sentinel.parent
+ page_1 = page_iterator_async.Page(
+ parent,
+ ("item 1.1", "item 1.2"),
+ page_iterator_async._item_to_value_identity,
+ )
+ page_2 = page_iterator_async.Page(
+ parent, ("item 2.1",), page_iterator_async._item_to_value_identity
+ )
+
+ async_iterator = PageAsyncIteratorImpl(None, None)
+ async_iterator._next_page = mock.AsyncMock(side_effect=[page_1, page_2, None])
+
+ # Consume items and check the state of the async_iterator.
+ assert async_iterator.num_results == 0
+ assert await async_iterator.__anext__() == "item 1.1"
+ assert async_iterator.num_results == 1
+
+ assert await async_iterator.__anext__() == "item 1.2"
+ assert async_iterator.num_results == 2
+
+ assert await async_iterator.__anext__() == "item 2.1"
+ assert async_iterator.num_results == 3
+
+ with pytest.raises(StopAsyncIteration):
+ await async_iterator.__anext__()
+
+ def test_pages_property_starts(self):
+ iterator = PageAsyncIteratorImpl(None, None)
+
+ assert not iterator._started
+
+ assert inspect.isasyncgen(iterator.pages)
+
+ assert iterator._started
+
+ def test_pages_property_restart(self):
+ iterator = PageAsyncIteratorImpl(None, None)
+
+ assert iterator.pages
+
+ # Make sure we cannot restart.
+ with pytest.raises(ValueError):
+ assert iterator.pages
+
+ @pytest.mark.asyncio
+ async def test__page_aiter_increment(self):
+ iterator = PageAsyncIteratorImpl(None, None)
+ page = page_iterator_async.Page(
+ iterator, ("item",), page_iterator_async._item_to_value_identity
+ )
+ iterator._next_page = mock.AsyncMock(side_effect=[page, None])
+
+ assert iterator.num_results == 0
+
+ page_aiter = iterator._page_aiter(increment=True)
+ await page_aiter.__anext__()
+
+ assert iterator.num_results == 1
+
+ @pytest.mark.asyncio
+ async def test__page_aiter_no_increment(self):
+ iterator = PageAsyncIteratorImpl(None, None)
+
+ assert iterator.num_results == 0
+
+ page_aiter = iterator._page_aiter(increment=False)
+ await page_aiter.__anext__()
+
+ # results should still be 0 after fetching a page.
+ assert iterator.num_results == 0
+
+ @pytest.mark.asyncio
+ async def test__items_aiter(self):
+ # Items to be returned.
+ item1 = 17
+ item2 = 100
+ item3 = 211
+
+ # Make pages from mock responses
+ parent = mock.sentinel.parent
+ page1 = page_iterator_async.Page(
+ parent, (item1, item2), page_iterator_async._item_to_value_identity
+ )
+ page2 = page_iterator_async.Page(
+ parent, (item3,), page_iterator_async._item_to_value_identity
+ )
+
+ iterator = PageAsyncIteratorImpl(None, None)
+ iterator._next_page = mock.AsyncMock(side_effect=[page1, page2, None])
+
+ items_aiter = iterator._items_aiter()
+
+ assert inspect.isasyncgen(items_aiter)
+
+ # Consume items and check the state of the iterator.
+ assert iterator.num_results == 0
+ assert await items_aiter.__anext__() == item1
+ assert iterator.num_results == 1
+
+ assert await items_aiter.__anext__() == item2
+ assert iterator.num_results == 2
+
+ assert await items_aiter.__anext__() == item3
+ assert iterator.num_results == 3
+
+ with pytest.raises(StopAsyncIteration):
+ await items_aiter.__anext__()
+
+ @pytest.mark.asyncio
+ async def test___aiter__(self):
+ async_iterator = PageAsyncIteratorImpl(None, None)
+ async_iterator._next_page = mock.AsyncMock(side_effect=[(1, 2), (3,), None])
+
+ assert not async_iterator._started
+
+ result = []
+ async for item in async_iterator:
+ result.append(item)
+
+ assert result == [1, 2, 3]
+ assert async_iterator._started
+
+ def test___aiter__restart(self):
+ iterator = PageAsyncIteratorImpl(None, None)
+
+ iterator.__aiter__()
+
+ # Make sure we cannot restart.
+ with pytest.raises(ValueError):
+ iterator.__aiter__()
+
+ def test___aiter___restart_after_page(self):
+ iterator = PageAsyncIteratorImpl(None, None)
+
+ assert iterator.pages
+
+ # Make sure we cannot restart after starting the page iterator.
+ with pytest.raises(ValueError):
+ iterator.__aiter__()
+
+
+class TestAsyncGRPCIterator(object):
+ def test_constructor(self):
+ client = mock.sentinel.client
+ items_field = "items"
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ client, mock.sentinel.method, mock.sentinel.request, items_field
+ )
+
+ assert not iterator._started
+ assert iterator.client is client
+ assert iterator.max_results is None
+ assert iterator.item_to_value is page_iterator_async._item_to_value_identity
+ assert iterator._method == mock.sentinel.method
+ assert iterator._request == mock.sentinel.request
+ assert iterator._items_field == items_field
+ assert (
+ iterator._request_token_field
+ == page_iterator_async.AsyncGRPCIterator._DEFAULT_REQUEST_TOKEN_FIELD
+ )
+ assert (
+ iterator._response_token_field
+ == page_iterator_async.AsyncGRPCIterator._DEFAULT_RESPONSE_TOKEN_FIELD
+ )
+ # Changing attributes.
+ assert iterator.page_number == 0
+ assert iterator.next_page_token is None
+ assert iterator.num_results == 0
+
+ def test_constructor_options(self):
+ client = mock.sentinel.client
+ items_field = "items"
+ request_field = "request"
+ response_field = "response"
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ client,
+ mock.sentinel.method,
+ mock.sentinel.request,
+ items_field,
+ item_to_value=mock.sentinel.item_to_value,
+ request_token_field=request_field,
+ response_token_field=response_field,
+ max_results=42,
+ )
+
+ assert iterator.client is client
+ assert iterator.max_results == 42
+ assert iterator.item_to_value is mock.sentinel.item_to_value
+ assert iterator._method == mock.sentinel.method
+ assert iterator._request == mock.sentinel.request
+ assert iterator._items_field == items_field
+ assert iterator._request_token_field == request_field
+ assert iterator._response_token_field == response_field
+
+ @pytest.mark.asyncio
+ async def test_iterate(self):
+ request = mock.Mock(spec=["page_token"], page_token=None)
+ response1 = mock.Mock(items=["a", "b"], next_page_token="1")
+ response2 = mock.Mock(items=["c"], next_page_token="2")
+ response3 = mock.Mock(items=["d"], next_page_token="")
+ method = mock.AsyncMock(side_effect=[response1, response2, response3])
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ mock.sentinel.client, method, request, "items"
+ )
+
+ assert iterator.num_results == 0
+
+ items = []
+ async for item in iterator:
+ items.append(item)
+
+ assert items == ["a", "b", "c", "d"]
+
+ method.assert_called_with(request)
+ assert method.call_count == 3
+ assert request.page_token == "2"
+
+ @pytest.mark.asyncio
+ async def test_iterate_with_max_results(self):
+ request = mock.Mock(spec=["page_token"], page_token=None)
+ response1 = mock.Mock(items=["a", "b"], next_page_token="1")
+ response2 = mock.Mock(items=["c"], next_page_token="2")
+ response3 = mock.Mock(items=["d"], next_page_token="")
+ method = mock.AsyncMock(side_effect=[response1, response2, response3])
+ iterator = page_iterator_async.AsyncGRPCIterator(
+ mock.sentinel.client, method, request, "items", max_results=3
+ )
+
+ assert iterator.num_results == 0
+
+ items = []
+ async for item in iterator:
+ items.append(item)
+
+ assert items == ["a", "b", "c"]
+ assert iterator.num_results == 3
+
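+ # max_results=3 stops iteration after "c", so the third page is never
+ # requested and request.page_token keeps the value sent with the
+ # second call.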
+ method.assert_called_with(request)
+ assert method.call_count == 2
+ assert request.page_token == "1"
diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py
new file mode 100644
index 0000000..9e51044
--- /dev/null
+++ b/tests/asyncio/test_retry_async.py
@@ -0,0 +1,405 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import re
+
+import mock
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import retry_async
+
+
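+# In the retry_target tests, range(10) is the sleep generator (0, 1, 2,
+# ... seconds between attempts) and the positional None is the deadline.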
+@mock.patch("asyncio.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+@pytest.mark.asyncio
+async def test_retry_target_success(utcnow, sleep):
+ predicate = retry_async.if_exception_type(ValueError)
+ call_count = [0]
+
+ async def target():
+ call_count[0] += 1
+ if call_count[0] < 3:
+ raise ValueError()
+ return 42
+
+ result = await retry_async.retry_target(target, predicate, range(10), None)
+
+ assert result == 42
+ assert call_count[0] == 3
+ sleep.assert_has_calls([mock.call(0), mock.call(1)])
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+@pytest.mark.asyncio
+async def test_retry_target_w_on_error(utcnow, sleep):
+ predicate = retry_async.if_exception_type(ValueError)
+ call_count = {"target": 0}
+ to_raise = ValueError()
+
+ async def target():
+ call_count["target"] += 1
+ if call_count["target"] < 3:
+ raise to_raise
+ return 42
+
+ on_error = mock.Mock()
+
+ result = await retry_async.retry_target(
+ target, predicate, range(10), None, on_error=on_error
+ )
+
+ assert result == 42
+ assert call_count["target"] == 3
+
+ on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
+ sleep.assert_has_calls([mock.call(0), mock.call(1)])
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+@pytest.mark.asyncio
+async def test_retry_target_non_retryable_error(utcnow, sleep):
+ predicate = retry_async.if_exception_type(ValueError)
+ exception = TypeError()
+ target = mock.Mock(side_effect=exception)
+
+ with pytest.raises(TypeError) as exc_info:
+ await retry_async.retry_target(target, predicate, range(10), None)
+
+ assert exc_info.value == exception
+ sleep.assert_not_called()
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
+@pytest.mark.asyncio
+async def test_retry_target_deadline_exceeded(utcnow, sleep):
+ predicate = retry_async.if_exception_type(ValueError)
+ exception = ValueError("meep")
+ target = mock.Mock(side_effect=exception)
+ # Set up the timeline so that the first call takes 5 seconds but the
+ # second call takes 6, which puts the retry over the deadline.
+ utcnow.side_effect = [
+ # The first call to utcnow establishes the start of the timeline.
+ datetime.datetime.min,
+ datetime.datetime.min + datetime.timedelta(seconds=5),
+ datetime.datetime.min + datetime.timedelta(seconds=11),
+ ]
+
+ with pytest.raises(exceptions.RetryError) as exc_info:
+ await retry_async.retry_target(target, predicate, range(10), deadline=10)
+
+ assert exc_info.value.cause == exception
+ assert exc_info.match("Deadline of 10.0s exceeded")
+ assert exc_info.match("last exception: meep")
+ assert target.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_retry_target_bad_sleep_generator():
+ with pytest.raises(ValueError, match="Sleep generator"):
+ await retry_async.retry_target(
+ mock.sentinel.target, mock.sentinel.predicate, [], None
+ )
+
+
+class TestAsyncRetry:
+ def test_constructor_defaults(self):
+ retry_ = retry_async.AsyncRetry()
+ assert retry_._predicate == retry_async.if_transient_error
+ assert retry_._initial == 1
+ assert retry_._maximum == 60
+ assert retry_._multiplier == 2
+ assert retry_._deadline == 120
+ assert retry_._on_error is None
+
+ def test_constructor_options(self):
+ _some_function = mock.Mock()
+
+ retry_ = retry_async.AsyncRetry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=_some_function,
+ )
+ assert retry_._predicate == mock.sentinel.predicate
+ assert retry_._initial == 1
+ assert retry_._maximum == 2
+ assert retry_._multiplier == 3
+ assert retry_._deadline == 4
+ assert retry_._on_error is _some_function
+
+ def test_with_deadline(self):
+ retry_ = retry_async.AsyncRetry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_deadline(42)
+ assert retry_ is not new_retry
+ assert new_retry._deadline == 42
+
+ # the rest of the attributes should remain the same
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_predicate(self):
+ retry_ = retry_async.AsyncRetry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_predicate(mock.sentinel.predicate)
+ assert retry_ is not new_retry
+ assert new_retry._predicate == mock.sentinel.predicate
+
+ # the rest of the attributes should remain the same
+ assert new_retry._deadline == retry_._deadline
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_noop(self):
+ retry_ = retry_async.AsyncRetry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay()
+ assert retry_ is not new_retry
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+
+ def test_with_delay(self):
+ retry_ = retry_async.AsyncRetry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 3
+
+ # the rest of the attributes should remain the same
+ assert new_retry._deadline == retry_._deadline
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes as changed Retry defaults should not
+ # cause this test to start failing.
+ retry_ = retry_async.AsyncRetry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ deadline=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<AsyncRetry predicate=<function.*?if_exception_type.*?>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___and_execute_success(self, sleep):
+ retry_ = retry_async.AsyncRetry()
+ target = mock.AsyncMock(spec=["__call__"], return_value=42)
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target)
+ target.assert_not_called()
+
+ result = await decorated("meep")
+
+ assert result == 42
+ target.assert_called_once_with("meep")
+ sleep.assert_not_called()
+
+ # Make uniform return half of its maximum, which is the calculated sleep time.
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___and_execute_retry(self, sleep, uniform):
+
+ on_error = mock.Mock(spec=["__call__"], side_effect=[None])
+ retry_ = retry_async.AsyncRetry(
+ predicate=retry_async.if_exception_type(ValueError)
+ )
+
+ target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError(), 42])
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target, on_error=on_error)
+ target.assert_not_called()
+
+ result = await decorated("meep")
+
+ assert result == 42
+ assert target.call_count == 2
+ target.assert_has_calls([mock.call("meep"), mock.call("meep")])
+ sleep.assert_called_once_with(retry_._initial)
+ assert on_error.call_count == 1
+
+ # Make uniform return half of its maximum, which is the calculated sleep time.
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
+
+ on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
+ retry_ = retry_async.AsyncRetry(
+ predicate=retry_async.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ deadline=9.9,
+ )
+
+ utcnow = datetime.datetime.utcnow()
+ utcnow_patcher = mock.patch(
+ "google.api_core.datetime_helpers.utcnow", return_value=utcnow
+ )
+
+ target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10)
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target, on_error=on_error)
+ target.assert_not_called()
+
+ with utcnow_patcher as patched_utcnow:
+ # Make sure that calls to fake asyncio.sleep() also advance the mocked
+ # time clock.
+ def increase_time(sleep_delay):
+ patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
+
+ sleep.side_effect = increase_time
+
+ with pytest.raises(exceptions.RetryError):
+ await decorated("meep")
+
+ assert target.call_count == 5
+ target.assert_has_calls([mock.call("meep")] * 5)
+ assert on_error.call_count == 5
+
+ # check the delays
+ assert sleep.call_count == 4 # one sleep between each pair of successive target calls
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+
+ assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
+ assert total_wait == 9.9 # the same as the deadline
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___init___without_retry_executed(self, sleep):
+ _some_function = mock.Mock()
+
+ retry_ = retry_async.AsyncRetry(
+ predicate=retry_async.if_exception_type(ValueError), on_error=_some_function
+ )
+ # check the proper creation of the class
+ assert retry_._on_error is _some_function
+
+ target = mock.AsyncMock(spec=["__call__"], side_effect=[42])
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ wrapped = retry_(target)
+
+ result = await wrapped("meep")
+
+ assert result == 42
+ target.assert_called_once_with("meep")
+ sleep.assert_not_called()
+ _some_function.assert_not_called()
+
+ # Make uniform return half of its maximum, which is the calculated sleep time.
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___init___when_retry_is_executed(self, sleep, uniform):
+ _some_function = mock.Mock()
+
+ retry_ = retry_async.AsyncRetry(
+ predicate=retry_async.if_exception_type(ValueError), on_error=_some_function
+ )
+ # check the proper creation of the class
+ assert retry_._on_error is _some_function
+
+ target = mock.AsyncMock(
+ spec=["__call__"], side_effect=[ValueError(), ValueError(), 42]
+ )
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ wrapped = retry_(target)
+ target.assert_not_called()
+
+ result = await wrapped("meep")
+
+ assert result == 42
+ assert target.call_count == 3
+ assert _some_function.call_count == 2
+ target.assert_has_calls([mock.call("meep"), mock.call("meep")])
+ sleep.assert_any_call(retry_._initial)
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/__init__.py
diff --git a/tests/unit/future/__init__.py b/tests/unit/future/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/future/__init__.py
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
new file mode 100644
index 0000000..98afc59
--- /dev/null
+++ b/tests/unit/future/test__helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+from google.api_core.future import _helpers
+
+
+@mock.patch("threading.Thread", autospec=True)
+def test_start_daemon_thread(unused_thread):
+ daemon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target)
+ assert daemon_thread.daemon is True
+
+
+def test_safe_invoke_callback():
+ callback = mock.Mock(spec=["__call__"], return_value=42)
+ result = _helpers.safe_invoke_callback(callback, "a", b="c")
+ assert result == 42
+ callback.assert_called_once_with("a", b="c")
+
+
+def test_safe_invoke_callback_exception():
+ callback = mock.Mock(spec=["__call__"], side_effect=ValueError())
+ result = _helpers.safe_invoke_callback(callback, "a", b="c")
+ assert result is None
+ callback.assert_called_once_with("a", b="c")
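+
+
+# Reference sketch of the contract verified above (a simplified stand-in for
+# _helpers.safe_invoke_callback, not the actual implementation, which also
+# logs the swallowed exception):
+def _safe_invoke_callback_sketch(callback, *args, **kwargs):
+ """Return callback(*args, **kwargs), or None if it raises."""
+ try:
+ return callback(*args, **kwargs)
+ except Exception:
+ return None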
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
new file mode 100644
index 0000000..2381d03
--- /dev/null
+++ b/tests/unit/future/test_polling.py
@@ -0,0 +1,242 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import concurrent.futures
+import threading
+import time
+
+import mock
+import pytest
+
+from google.api_core import exceptions, retry
+from google.api_core.future import polling
+
+
+class PollingFutureImpl(polling.PollingFuture):
+ def done(self):
+ return False
+
+ def cancel(self):
+ return True
+
+ def cancelled(self):
+ return False
+
+ def running(self):
+ return True
+
+
+def test_polling_future_constructor():
+ future = PollingFutureImpl()
+ assert not future.done()
+ assert not future.cancelled()
+ assert future.running()
+ assert future.cancel()
+ with mock.patch.object(future, "done", return_value=True):
+ future.result()
+
+
+def test_set_result():
+ future = PollingFutureImpl()
+ callback = mock.Mock()
+
+ future.set_result(1)
+
+ assert future.result() == 1
+ future.add_done_callback(callback)
+ callback.assert_called_once_with(future)
+
+
+def test_set_exception():
+ future = PollingFutureImpl()
+ exception = ValueError("meep")
+
+ future.set_exception(exception)
+
+ assert future.exception() == exception
+ with pytest.raises(ValueError):
+ future.result()
+
+ callback = mock.Mock()
+ future.add_done_callback(callback)
+ callback.assert_called_once_with(future)
+
+
+def test_invoke_callback_exception():
+ future = PollingFutureImplWithPoll()
+ future.set_result(42)
+
+ # This should not raise, despite the callback causing an exception.
+ callback = mock.Mock(side_effect=ValueError)
+ future.add_done_callback(callback)
+ callback.assert_called_once_with(future)
+
+
+class PollingFutureImplWithPoll(PollingFutureImpl):
+ def __init__(self):
+ super(PollingFutureImplWithPoll, self).__init__()
+ self.poll_count = 0
+ self.event = threading.Event()
+
+ def done(self, retry=polling.DEFAULT_RETRY):
+ self.poll_count += 1
+ self.event.wait()
+ self.set_result(42)
+ return True
+
+
+def test_result_with_polling():
+ future = PollingFutureImplWithPoll()
+
+ future.event.set()
+ result = future.result()
+
+ assert result == 42
+ assert future.poll_count == 1
+ # Repeated calls should not cause additional polling
+ assert future.result() == result
+ assert future.poll_count == 1
+
+
+class PollingFutureImplTimeout(PollingFutureImplWithPoll):
+ def done(self, retry=polling.DEFAULT_RETRY):
+ time.sleep(1)
+ return False
+
+
+def test_result_timeout():
+ future = PollingFutureImplTimeout()
+ with pytest.raises(concurrent.futures.TimeoutError):
+ future.result(timeout=1)
+
+
+def test_exception_timeout():
+ future = PollingFutureImplTimeout()
+ with pytest.raises(concurrent.futures.TimeoutError):
+ future.exception(timeout=1)
+
+
+class PollingFutureImplTransient(PollingFutureImplWithPoll):
+ def __init__(self, errors):
+ super(PollingFutureImplTransient, self).__init__()
+ self._errors = errors
+
+ def done(self, retry=polling.DEFAULT_RETRY):
+ if self._errors:
+ error, self._errors = self._errors[0], self._errors[1:]
+ raise error("testing")
+ self.poll_count += 1
+ self.set_result(42)
+ return True
+
+
+def test_result_transient_error():
+ future = PollingFutureImplTransient(
+ (
+ exceptions.TooManyRequests,
+ exceptions.InternalServerError,
+ exceptions.BadGateway,
+ )
+ )
+ result = future.result()
+ assert result == 42
+ assert future.poll_count == 1
+ # Repeated calls should not cause additional polling
+ assert future.result() == result
+ assert future.poll_count == 1
+
+
+def test_callback_background_thread():
+ future = PollingFutureImplWithPoll()
+ callback = mock.Mock()
+
+ future.add_done_callback(callback)
+
+ assert future._polling_thread is not None
+
+ # Give the thread a second to poll
+ time.sleep(1)
+ assert future.poll_count == 1
+
+ future.event.set()
+ future._polling_thread.join()
+
+ callback.assert_called_once_with(future)
+
+
+def test_double_callback_background_thread():
+ future = PollingFutureImplWithPoll()
+ callback = mock.Mock()
+ callback2 = mock.Mock()
+
+ future.add_done_callback(callback)
+ current_thread = future._polling_thread
+ assert current_thread is not None
+
+ # only one polling thread should be created.
+ future.add_done_callback(callback2)
+ assert future._polling_thread is current_thread
+
+ future.event.set()
+ future._polling_thread.join()
+
+ assert future.poll_count == 1
+ callback.assert_called_once_with(future)
+ callback2.assert_called_once_with(future)
+
+
+class PollingFutureImplWithoutRetry(PollingFutureImpl):
+ def done(self):
+ return True
+
+ def result(self):
+ return super(PollingFutureImplWithoutRetry, self).result()
+
+ def _blocking_poll(self, timeout):
+ return super(PollingFutureImplWithoutRetry, self)._blocking_poll(
+ timeout=timeout
+ )
+
+
+class PollingFutureImplWith_done_or_raise(PollingFutureImpl):
+ def done(self):
+ return True
+
+ def _done_or_raise(self):
+ return super(PollingFutureImplWith_done_or_raise, self)._done_or_raise()
+
+
+def test_polling_future_without_retry():
+ custom_retry = retry.Retry(
+ predicate=retry.if_exception_type(exceptions.TooManyRequests)
+ )
+ future = PollingFutureImplWithoutRetry()
+ assert future.done()
+ assert future.running()
+ assert future.result() is None
+
+ with mock.patch.object(future, "done") as done_mock:
+ future._done_or_raise()
+ done_mock.assert_called_once_with()
+
+ with mock.patch.object(future, "done") as done_mock:
+ future._done_or_raise(retry=custom_retry)
+ done_mock.assert_called_once_with(retry=custom_retry)
+
+
+def test_polling_future_with__done_or_raise():
+ future = PollingFutureImplWith_done_or_raise()
+ assert future.done()
+ assert future.running()
+ assert future.result() is None
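+
+
+# Simplified sketch of the blocking-poll loop the tests above depend on (the
+# real polling.PollingFuture._blocking_poll also routes done() through
+# DEFAULT_RETRY; the poll interval here is an illustrative constant):
+def _blocking_poll_sketch(future, timeout=None):
+ """Repeatedly call future.done() until it succeeds or the timeout expires."""
+ deadline = None if timeout is None else time.monotonic() + timeout
+ while not future.done():
+ if deadline is not None and time.monotonic() > deadline:
+ raise concurrent.futures.TimeoutError()
+ time.sleep(0.1)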
diff --git a/tests/unit/gapic/test_client_info.py b/tests/unit/gapic/test_client_info.py
new file mode 100644
index 0000000..2ca5c40
--- /dev/null
+++ b/tests/unit/gapic/test_client_info.py
@@ -0,0 +1,31 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+
+from google.api_core.gapic_v1 import client_info
+
+
+def test_to_grpc_metadata():
+ info = client_info.ClientInfo()
+
+ metadata = info.to_grpc_metadata()
+
+ assert metadata == (client_info.METRICS_METADATA_KEY, info.to_user_agent())
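+
+
+def test_to_grpc_metadata_value_matches_user_agent():
+ # Added for illustration: per the contract asserted above, the metadata
+ # value is exactly the to_user_agent() string (the gapic_version here is a
+ # hypothetical example value).
+ info = client_info.ClientInfo(gapic_version="1.0.0")
+ key, value = info.to_grpc_metadata()
+ assert key == client_info.METRICS_METADATA_KEY
+ assert value == info.to_user_agent()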
diff --git a/tests/unit/gapic/test_config.py b/tests/unit/gapic/test_config.py
new file mode 100644
index 0000000..5e42fde
--- /dev/null
+++ b/tests/unit/gapic/test_config.py
@@ -0,0 +1,94 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core.gapic_v1 import config
+
+
+INTERFACE_CONFIG = {
+ "retry_codes": {
+ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "other": ["FAILED_PRECONDITION"],
+ "non_idempotent": [],
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 2.5,
+ "max_retry_delay_millis": 120000,
+ "initial_rpc_timeout_millis": 120000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 120000,
+ "total_timeout_millis": 600000,
+ },
+ "other": {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 1,
+ "max_retry_delay_millis": 1000,
+ "initial_rpc_timeout_millis": 1000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 1000,
+ "total_timeout_millis": 1000,
+ },
+ },
+ "methods": {
+ "AnnotateVideo": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default",
+ },
+ "Other": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "other",
+ "retry_params_name": "other",
+ },
+ "Plain": {"timeout_millis": 30000},
+ },
+}
+
+
+def test_create_method_configs():
+ method_configs = config.parse_method_configs(INTERFACE_CONFIG)
+
+ retry, timeout = method_configs["AnnotateVideo"]
+ assert retry._predicate(exceptions.DeadlineExceeded(None))
+ assert retry._predicate(exceptions.ServiceUnavailable(None))
+ assert retry._initial == 1.0
+ assert retry._multiplier == 2.5
+ assert retry._maximum == 120.0
+ assert retry._deadline == 600.0
+ assert timeout._initial == 120.0
+ assert timeout._multiplier == 1.0
+ assert timeout._maximum == 120.0
+
+ retry, timeout = method_configs["Other"]
+ assert retry._predicate(exceptions.FailedPrecondition(None))
+ assert retry._initial == 1.0
+ assert retry._multiplier == 1.0
+ assert retry._maximum == 1.0
+ assert retry._deadline == 1.0
+ assert timeout._initial == 1.0
+ assert timeout._multiplier == 1.0
+ assert timeout._maximum == 1.0
+
+ retry, timeout = method_configs["Plain"]
+ assert retry is None
+ assert timeout._timeout == 30.0
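+
+
+# The conversion checked above: parse_method_configs turns the config's
+# millisecond fields into seconds on the resulting Retry/Timeout objects,
+# e.g. initial_retry_delay_millis=1000 -> retry._initial == 1.0 and
+# total_timeout_millis=600000 -> retry._deadline == 600.0.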
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
new file mode 100644
index 0000000..9778d23
--- /dev/null
+++ b/tests/unit/gapic/test_method.py
@@ -0,0 +1,244 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core import timeout
+import google.api_core.gapic_v1.client_info
+import google.api_core.gapic_v1.method
+import google.api_core.page_iterator
+
+
+def _utcnow_monotonic():
+ curr_value = datetime.datetime.min
+ delta = datetime.timedelta(seconds=0.5)
+ while True:
+ yield curr_value
+ curr_value += delta
+
+
+def test__determine_timeout():
+ # Check _determine_timeout always returns a Timeout object.
+ timeout_type_timeout = timeout.ConstantTimeout(600.0)
+ returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
+ 600.0, 600.0, None
+ )
+ assert isinstance(returned_timeout, timeout.ConstantTimeout)
+ returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
+ 600.0, timeout_type_timeout, None
+ )
+ assert isinstance(returned_timeout, timeout.ConstantTimeout)
+ returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
+ timeout_type_timeout, 600.0, None
+ )
+ assert isinstance(returned_timeout, timeout.ConstantTimeout)
+ returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
+ timeout_type_timeout, timeout_type_timeout, None
+ )
+ assert isinstance(returned_timeout, timeout.ConstantTimeout)
+
+
+def test_wrap_method_basic():
+ method = mock.Mock(spec=["__call__"], return_value=42)
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
+
+ result = wrapped_method(1, 2, meep="moop")
+
+ assert result == 42
+ method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
+
+ # Check that the default client info was specified in the metadata.
+ metadata = method.call_args[1]["metadata"]
+ assert len(metadata) == 1
+ client_info = google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO
+ user_agent_metadata = client_info.to_grpc_metadata()
+ assert user_agent_metadata in metadata
+
+
+def test_wrap_method_with_no_client_info():
+ method = mock.Mock(spec=["__call__"])
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, client_info=None
+ )
+
+ wrapped_method(1, 2, meep="moop")
+
+ method.assert_called_once_with(1, 2, meep="moop")
+
+
+def test_wrap_method_with_custom_client_info():
+ client_info = google.api_core.gapic_v1.client_info.ClientInfo(
+ python_version=1,
+ grpc_version=2,
+ api_core_version=3,
+ gapic_version=4,
+ client_library_version=5,
+ )
+ method = mock.Mock(spec=["__call__"])
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, client_info=client_info
+ )
+
+ wrapped_method(1, 2, meep="moop")
+
+ method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
+
+ # Check that the custom client info was specified in the metadata.
+ metadata = method.call_args[1]["metadata"]
+ assert client_info.to_grpc_metadata() in metadata
+
+
+def test_invoke_wrapped_method_with_metadata():
+ method = mock.Mock(spec=["__call__"])
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
+
+ wrapped_method(mock.sentinel.request, metadata=[("a", "b")])
+
+ method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
+ metadata = method.call_args[1]["metadata"]
+ # Metadata should have two items: the client info metadata and our custom
+ # metadata.
+ assert len(metadata) == 2
+ assert ("a", "b") in metadata
+
+
+def test_invoke_wrapped_method_with_metadata_as_none():
+ method = mock.Mock(spec=["__call__"])
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
+
+ wrapped_method(mock.sentinel.request, metadata=None)
+
+ method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
+ metadata = method.call_args[1]["metadata"]
+ # Metadata should have just one item: the client info metadata.
+ assert len(metadata) == 1
+
+
+@mock.patch("time.sleep")
+def test_wrap_method_with_default_retry_and_timeout(unused_sleep):
+ method = mock.Mock(
+ spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
+ )
+ default_retry = retry.Retry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = wrapped_method()
+
+ assert result == 42
+ assert method.call_count == 2
+ method.assert_called_with(timeout=60, metadata=mock.ANY)
+
+
+@mock.patch("time.sleep")
+def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
+ method = mock.Mock(
+ spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
+ )
+ default_retry = retry.Retry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = wrapped_method(
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ )
+
+ assert result == 42
+ assert method.call_count == 2
+ method.assert_called_with(timeout=60, metadata=mock.ANY)
+
+
+@mock.patch("time.sleep")
+def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep):
+ method = mock.Mock(spec=["__call__"], side_effect=[exceptions.NotFound(None), 42])
+ default_retry = retry.Retry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = wrapped_method(
+ retry=retry.Retry(retry.if_exception_type(exceptions.NotFound)),
+ timeout=timeout.ConstantTimeout(22),
+ )
+
+ assert result == 42
+ assert method.call_count == 2
+ method.assert_called_with(timeout=22, metadata=mock.ANY)
+
+
+@mock.patch("time.sleep")
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ side_effect=_utcnow_monotonic(),
+ autospec=True,
+)
+def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
+ method = mock.Mock(
+ spec=["__call__"],
+ side_effect=([exceptions.InternalServerError(None)] * 4) + [42],
+ )
+ default_retry = retry.Retry()
+ default_timeout = timeout.ExponentialTimeout(deadline=60)
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ # Overriding only the retry's deadline should also override the timeout's
+ # deadline.
+ result = wrapped_method(retry=default_retry.with_deadline(30))
+
+ assert result == 42
+ timeout_args = [call[1]["timeout"] for call in method.call_args_list]
+ assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
+ assert utcnow.call_count == (
+ 1
+ + 5 # First to set the deadline.
+ + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+ )
+
+
+def test_wrap_method_with_overriding_timeout_as_a_number():
+ method = mock.Mock(spec=["__call__"], return_value=42)
+ default_retry = retry.Retry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = wrapped_method(timeout=22)
+
+ assert result == 42
+ method.assert_called_once_with(timeout=22, metadata=mock.ANY)
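+
+
+# Usage sketch (the stub and request names are hypothetical): wrap_method
+# layers defaults under per-call overrides, which is what the tests above
+# exercise.
+#
+# wrapped = google.api_core.gapic_v1.method.wrap_method(
+#     stub.GetThing,
+#     default_retry=retry.Retry(),
+#     default_timeout=timeout.ConstantTimeout(60),
+# )
+# response = wrapped(request, timeout=22)  # 22 overrides the 60s default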
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
new file mode 100644
index 0000000..3037867
--- /dev/null
+++ b/tests/unit/gapic/test_routing_header.py
@@ -0,0 +1,41 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+
+from google.api_core.gapic_v1 import routing_header
+
+
+def test_to_routing_header():
+ params = [("name", "meep"), ("book.read", "1")]
+ value = routing_header.to_routing_header(params)
+ assert value == "name=meep&book.read=1"
+
+
+def test_to_routing_header_with_slashes():
+ params = [("name", "me/ep"), ("book.read", "1&2")]
+ value = routing_header.to_routing_header(params)
+ assert value == "name=me/ep&book.read=1%262"
+
+
+def test_to_grpc_metadata():
+ params = [("name", "meep"), ("book.read", "1")]
+ metadata = routing_header.to_grpc_metadata(params)
+ assert metadata == (routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
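+
+
+def test_to_routing_header_matches_urlencode_sketch():
+ # Illustrative cross-check (assumption: the header follows standard URL
+ # encoding with "/" left unescaped, as the slash test above suggests).
+ from urllib.parse import urlencode
+
+ params = [("name", "me/ep"), ("book.read", "1&2")]
+ assert routing_header.to_routing_header(params) == urlencode(params, safe="/")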
diff --git a/tests/unit/operations_v1/__init__.py b/tests/unit/operations_v1/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/operations_v1/__init__.py
diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py
new file mode 100644
index 0000000..187f0be
--- /dev/null
+++ b/tests/unit/operations_v1/test_operations_client.py
@@ -0,0 +1,98 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import page_iterator
+from google.longrunning import operations_pb2
+from google.protobuf import empty_pb2
+
+
+def test_get_operation():
+ channel = grpc_helpers.ChannelStub()
+ client = operations_v1.OperationsClient(channel)
+ channel.GetOperation.response = operations_pb2.Operation(name="meep")
+
+ response = client.get_operation("name", metadata=[("header", "foo")])
+
+ assert ("header", "foo") in channel.GetOperation.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.GetOperation.calls[
+ 0
+ ].metadata
+ assert len(channel.GetOperation.requests) == 1
+ assert channel.GetOperation.requests[0].name == "name"
+ assert response == channel.GetOperation.response
+
+
+def test_list_operations():
+ channel = grpc_helpers.ChannelStub()
+ client = operations_v1.OperationsClient(channel)
+ operations = [
+ operations_pb2.Operation(name="1"),
+ operations_pb2.Operation(name="2"),
+ ]
+ list_response = operations_pb2.ListOperationsResponse(operations=operations)
+ channel.ListOperations.response = list_response
+
+ response = client.list_operations("name", "filter", metadata=[("header", "foo")])
+
+ assert isinstance(response, page_iterator.Iterator)
+ assert list(response) == operations
+
+ assert ("header", "foo") in channel.ListOperations.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.ListOperations.calls[
+ 0
+ ].metadata
+ assert len(channel.ListOperations.requests) == 1
+ request = channel.ListOperations.requests[0]
+ assert isinstance(request, operations_pb2.ListOperationsRequest)
+ assert request.name == "name"
+ assert request.filter == "filter"
+
+
+def test_delete_operation():
+ channel = grpc_helpers.ChannelStub()
+ client = operations_v1.OperationsClient(channel)
+ channel.DeleteOperation.response = empty_pb2.Empty()
+
+ client.delete_operation("name", metadata=[("header", "foo")])
+
+ assert ("header", "foo") in channel.DeleteOperation.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.DeleteOperation.calls[
+ 0
+ ].metadata
+ assert len(channel.DeleteOperation.requests) == 1
+ assert channel.DeleteOperation.requests[0].name == "name"
+
+
+def test_cancel_operation():
+ channel = grpc_helpers.ChannelStub()
+ client = operations_v1.OperationsClient(channel)
+ channel.CancelOperation.response = empty_pb2.Empty()
+
+ client.cancel_operation("name", metadata=[("header", "foo")])
+
+ assert ("header", "foo") in channel.CancelOperation.calls[0].metadata
+ assert ("x-goog-request-params", "name=name") in channel.CancelOperation.calls[
+ 0
+ ].metadata
+ assert len(channel.CancelOperation.requests) == 1
+ assert channel.CancelOperation.requests[0].name == "name"
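+
+
+# Pattern shared by the four tests above: every call carries the caller's own
+# metadata plus an "x-goog-request-params" entry of the form "name=<name>",
+# the routing header that identifies the target operation.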
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
new file mode 100644
index 0000000..dddf6b7
--- /dev/null
+++ b/tests/unit/operations_v1/test_operations_rest_client.py
@@ -0,0 +1,944 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+
+import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+from requests import Response # noqa I201
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core.operations_v1 import AbstractOperationsClient
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1 import transports
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import json_format # type: ignore
+from google.rpc import status_pb2 # type: ignore
+
+
+HTTP_OPTIONS = {
+ "google.longrunning.Operations.CancelOperation": [
+ {"method": "post", "uri": "/v3/{name=operations/*}:cancel", "body": "*"},
+ ],
+ "google.longrunning.Operations.DeleteOperation": [
+ {"method": "delete", "uri": "/v3/{name=operations/*}"},
+ ],
+ "google.longrunning.Operations.GetOperation": [
+ {"method": "get", "uri": "/v3/{name=operations/*}"},
+ ],
+ "google.longrunning.Operations.ListOperations": [
+ {"method": "get", "uri": "/v3/{name=operations}"},
+ ],
+}
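+
+# For orientation (assuming the standard google.api.http path-template
+# syntax): "{name=operations/*}" binds the request's "name" field into the
+# URI, so name="operations/sample1" expands "/v3/{name=operations/*}" to
+# "/v3/operations/sample1", matching the URLs asserted in the tests below.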
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+def _get_operations_client(http_options=HTTP_OPTIONS):
+ transport = transports.rest.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+ )
+
+ return AbstractOperationsClient(transport=transport)
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert AbstractOperationsClient._get_default_mtls_endpoint(None) is None
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize("client_class", [AbstractOperationsClient])
+def test_operations_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "longrunning.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "transport_class,transport_name", [(transports.OperationsRestTransport, "rest")]
+)
+def test_operations_client_service_account_always_use_jwt(
+ transport_class, transport_name
+):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class", [AbstractOperationsClient])
+def test_operations_client_from_service_account_file(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "longrunning.googleapis.com:443"
+
+
+def test_operations_client_get_transport_class():
+ transport = AbstractOperationsClient.get_transport_class()
+ available_transports = [
+ transports.OperationsRestTransport,
+ ]
+ assert transport in available_transports
+
+ transport = AbstractOperationsClient.get_transport_class("rest")
+ assert transport == transports.OperationsRestTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+@mock.patch.object(
+ AbstractOperationsClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(AbstractOperationsClient),
+)
+def test_operations_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "true"),
+ (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "false"),
+ ],
+)
+@mock.patch.object(
+ AbstractOperationsClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(AbstractOperationsClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_operations_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. The endpoint is autoswitched
+ # to the default mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is
+ # "true" and a client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+
+ def fake_init(client_cert_source_for_mtls=None, **kwargs):
+ """Invoke client_cert source if provided."""
+
+ if client_cert_source_for_mtls:
+ client_cert_source_for_mtls()
+ return None
+
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.side_effect = fake_init
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+def test_operations_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+def test_operations_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+def test_list_operations_rest(
+ transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
+):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.list_operations(
+ name="operations", filter_="my_filter", page_size=10, page_token="abc"
+ )
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations"
+ )
+ assert actual_args.kwargs["params"] == [
+ ("filter", "my_filter"),
+ ("pageSize", 10),
+ ("pageToken", "abc"),
+ ]
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListOperationsPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_operations_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.list_operations(name="operations")
+
+
+def test_list_operations_rest_pager():
+ client = AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ operations_pb2.ListOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token="abc",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[], next_page_token="def",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation()], next_page_token="ghi",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation(), operations_pb2.Operation()],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(json_format.MessageToJson(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode("UTF-8")
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ pager = client.list_operations(name="operations")
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+
+ pages = list(client.list_operations(name="operations").pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.next_page_token == token
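+
+ # The pager presumably re-issues the request with each response's
+ # next_page_token until a page arrives with an empty token, which is why
+ # the four mocked responses yield 3 + 0 + 1 + 2 == 6 operations in total.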
+
+
+def test_get_operation_rest(
+ transport: str = "rest", request_type=operations_pb2.GetOperationRequest
+):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(
+ name="operations/sample1", done=True, error=status_pb2.Status(code=411),
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.get_operation("operations/sample1")
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations/sample1"
+ )
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+ assert response.name == "operations/sample1"
+ assert response.done is True
+
+
+def test_get_operation_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = (
+ "https://longrunning.googleapis.com:443/v1/operations/sample1"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.get_operation("operations/sample1")
+
+
+def test_delete_operation_rest(
+ transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
+):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ client.delete_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "DELETE"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations/sample1"
+ )
+
+
+def test_delete_operation_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "DELETE"
+ mock_request.url = (
+ "https://longrunning.googleapis.com:443/v1/operations/sample1"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.delete_operation(name="operations/sample1")
+
+
+def test_cancel_operation_rest(transport: str = "rest"):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ client.cancel_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "POST"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations/sample1:cancel"
+ )
+
+
+def test_cancel_operation_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "POST"
+ mock_request.url = (
+ "https://longrunning.googleapis.com:443/v1/operations/sample1:cancel"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.cancel_operation(name="operations/sample1")
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ AbstractOperationsClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ AbstractOperationsClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = AbstractOperationsClient(transport=transport)
+ assert client.transport is transport
+
+
+@pytest.mark.parametrize("transport_class", [transports.OperationsRestTransport])
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_operations_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transports.OperationsTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_operations_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.OperationsTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "list_operations",
+ "get_operation",
+ "delete_operation",
+ "cancel_operation",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
+
+def test_operations_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transports.OperationsTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=(),
+ quota_project_id="octopus",
+ )
+
+
+def test_operations_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transports.OperationsTransport()
+ adc.assert_called_once()
+
+
+def test_operations_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ AbstractOperationsClient()
+ adc.assert_called_once_with(
+ scopes=None, default_scopes=(), quota_project_id=None,
+ )
+
+
+def test_operations_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.OperationsRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_operations_host_no_port():
+ client = AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="longrunning.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "longrunning.googleapis.com:443"
+
+
+def test_operations_host_with_port():
+ client = AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="longrunning.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "longrunning.googleapis.com:8000"
+
+
+def test_common_billing_account_path():
+ billing_account = "squid"
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = AbstractOperationsClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = AbstractOperationsClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "whelk"
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = AbstractOperationsClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = AbstractOperationsClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "oyster"
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = AbstractOperationsClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = AbstractOperationsClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "cuttlefish"
+ expected = "projects/{project}".format(project=project,)
+ actual = AbstractOperationsClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = AbstractOperationsClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = AbstractOperationsClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = AbstractOperationsClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_with_default_client_info():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.OperationsTransport, "_prep_wrapped_messages"
+ ) as prep:
+ AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.OperationsTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = AbstractOperationsClient.get_transport_class()
+ transport_class(
+ credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
new file mode 100644
index 0000000..7fb1620
--- /dev/null
+++ b/tests/unit/test_bidi.py
@@ -0,0 +1,869 @@
+# Copyright 2018, Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import logging
+import queue
+import threading
+
+import mock
+import pytest
+
+try:
+ import grpc
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import bidi
+from google.api_core import exceptions
+
+
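+# bidi._RequestQueueGenerator adapts a queue.Queue into the request
+# iterator that a gRPC stream-stream call consumes. A minimal usage
+# sketch, inferred from the tests below (None is the stop sentinel, and
+# the generator exits once its attached call goes inactive; the RPC name
+# here is hypothetical):
+#
+#   q = queue.Queue()
+#   generator = bidi._RequestQueueGenerator(q)
+#   call = some_stream_stream_rpc(iter(generator))
+#   generator.call = call   # let the generator track call liveness
+#   q.put(request)          # enqueue a request for the stream
+#   q.put(None)             # signal the generator to stop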
+class Test_RequestQueueGenerator(object):
+ def test_bounded_consume(self):
+ call = mock.create_autospec(grpc.Call, instance=True)
+ call.is_active.return_value = True
+
+ def queue_generator(rpc):
+ yield mock.sentinel.A
+ yield queue.Empty()
+ yield mock.sentinel.B
+ rpc.is_active.return_value = False
+ yield mock.sentinel.C
+
+ q = mock.create_autospec(queue.Queue, instance=True)
+ q.get.side_effect = queue_generator(call)
+
+ generator = bidi._RequestQueueGenerator(q)
+ generator.call = call
+
+ items = list(generator)
+
+ assert items == [mock.sentinel.A, mock.sentinel.B]
+
+ def test_yield_initial_and_exit(self):
+ q = mock.create_autospec(queue.Queue, instance=True)
+ q.get.side_effect = queue.Empty()
+ call = mock.create_autospec(grpc.Call, instance=True)
+ call.is_active.return_value = False
+
+ generator = bidi._RequestQueueGenerator(q, initial_request=mock.sentinel.A)
+ generator.call = call
+
+ items = list(generator)
+
+ assert items == [mock.sentinel.A]
+
+ def test_yield_initial_callable_and_exit(self):
+ q = mock.create_autospec(queue.Queue, instance=True)
+ q.get.side_effect = queue.Empty()
+ call = mock.create_autospec(grpc.Call, instance=True)
+ call.is_active.return_value = False
+
+ generator = bidi._RequestQueueGenerator(
+ q, initial_request=lambda: mock.sentinel.A
+ )
+ generator.call = call
+
+ items = list(generator)
+
+ assert items == [mock.sentinel.A]
+
+ def test_exit_when_inactive_with_item(self):
+ q = mock.create_autospec(queue.Queue, instance=True)
+ q.get.side_effect = [mock.sentinel.A, queue.Empty()]
+ call = mock.create_autospec(grpc.Call, instance=True)
+ call.is_active.return_value = False
+
+ generator = bidi._RequestQueueGenerator(q)
+ generator.call = call
+
+ items = list(generator)
+
+ assert items == []
+ # Make sure it put the item back.
+ q.put.assert_called_once_with(mock.sentinel.A)
+
+ def test_exit_when_inactive_empty(self):
+ q = mock.create_autospec(queue.Queue, instance=True)
+ q.get.side_effect = queue.Empty()
+ call = mock.create_autospec(grpc.Call, instance=True)
+ call.is_active.return_value = False
+
+ generator = bidi._RequestQueueGenerator(q)
+ generator.call = call
+
+ items = list(generator)
+
+ assert items == []
+
+ def test_exit_with_stop(self):
+ q = mock.create_autospec(queue.Queue, instance=True)
+ q.get.side_effect = [None, queue.Empty()]
+ call = mock.create_autospec(grpc.Call, instance=True)
+ call.is_active.return_value = True
+
+ generator = bidi._RequestQueueGenerator(q)
+ generator.call = call
+
+ items = list(generator)
+
+ assert items == []
+
+
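+# bidi._Throttle is a context manager admitting at most `access_limit`
+# entries per sliding `time_window`; excess entries block, and the value
+# bound by the `with` statement reports the seconds waited. A minimal
+# sketch of the pattern exercised below:
+#
+#   throttle = bidi._Throttle(
+#       access_limit=3, time_window=datetime.timedelta(seconds=1)
+#   )
+#   with throttle as time_waited:
+#       pass  # time_waited is 0.0 unless this entry had to be delayed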
+class Test_Throttle(object):
+ def test_repr(self):
+ delta = datetime.timedelta(seconds=4.5)
+ instance = bidi._Throttle(access_limit=42, time_window=delta)
+ assert repr(instance) == "_Throttle(access_limit=42, time_window={})".format(
+ repr(delta)
+ )
+
+ def test_raises_error_on_invalid_init_arguments(self):
+ with pytest.raises(ValueError) as exc_info:
+ bidi._Throttle(access_limit=10, time_window=datetime.timedelta(seconds=0.0))
+ assert "time_window" in str(exc_info.value)
+ assert "must be a positive timedelta" in str(exc_info.value)
+
+ with pytest.raises(ValueError) as exc_info:
+ bidi._Throttle(access_limit=0, time_window=datetime.timedelta(seconds=10))
+ assert "access_limit" in str(exc_info.value)
+ assert "must be positive" in str(exc_info.value)
+
+ def test_does_not_delay_entry_attempts_under_threshold(self):
+ throttle = bidi._Throttle(
+ access_limit=3, time_window=datetime.timedelta(seconds=1)
+ )
+ entries = []
+
+ for _ in range(3):
+ with throttle as time_waited:
+ entry_info = {
+ "entered_at": datetime.datetime.now(),
+ "reported_wait": time_waited,
+ }
+ entries.append(entry_info)
+
+ # check the reported wait times ...
+ assert all(entry["reported_wait"] == 0.0 for entry in entries)
+
+ # ... and the actual wait times
+ delta = entries[1]["entered_at"] - entries[0]["entered_at"]
+ assert delta.total_seconds() < 0.1
+ delta = entries[2]["entered_at"] - entries[1]["entered_at"]
+ assert delta.total_seconds() < 0.1
+
+ def test_delays_entry_attempts_above_threshold(self):
+ throttle = bidi._Throttle(
+ access_limit=3, time_window=datetime.timedelta(seconds=1)
+ )
+ entries = []
+
+ for _ in range(6):
+ with throttle as time_waited:
+ entry_info = {
+ "entered_at": datetime.datetime.now(),
+ "reported_wait": time_waited,
+ }
+ entries.append(entry_info)
+
+ # For each group of 4 consecutive entries the time difference between
+ # the first and the last entry must have been greater than time_window,
+ # because a maximum of 3 are allowed in each time_window.
+ for i, entry in enumerate(entries[3:], start=3):
+ first_entry = entries[i - 3]
+ delta = entry["entered_at"] - first_entry["entered_at"]
+ assert delta.total_seconds() > 1.0
+
+ # check the reported wait times
+ # (NOTE: not using assert all(...) because the coverage check would complain)
+ for i, entry in enumerate(entries):
+ if i != 3:
+ assert entry["reported_wait"] == 0.0
+
+ # The delayed entry is expected to have been delayed for a significant
+ # chunk of the full second, and the actual and reported delay times
+ # should reflect that.
+ assert entries[3]["reported_wait"] > 0.7
+ delta = entries[3]["entered_at"] - entries[2]["entered_at"]
+ assert delta.total_seconds() > 0.7
+
+
+class _CallAndFuture(grpc.Call, grpc.Future):
+ pass
+
+
+def make_rpc():
+ """Makes a mock RPC used to test Bidi classes."""
+ call = mock.create_autospec(_CallAndFuture, instance=True)
+ rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
+
+ def rpc_side_effect(request, metadata=None):
+ call.is_active.return_value = True
+ call.request = request
+ call.metadata = metadata
+ return call
+
+ rpc.side_effect = rpc_side_effect
+
+ def cancel_side_effect():
+ call.is_active.return_value = False
+
+ call.cancel.side_effect = cancel_side_effect
+
+ return rpc, call
+
+
+class ClosedCall(object):
+ def __init__(self, exception):
+ self.exception = exception
+
+ def __next__(self):
+ raise self.exception
+
+ def is_active(self):
+ return False
+
+
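+# BidiRpc wraps a bidirectional streaming RPC in an explicit lifecycle. A
+# sketch of the flow these tests exercise, with `rpc` standing in for a
+# grpc.StreamStreamMultiCallable:
+#
+#   bidi_rpc = bidi.BidiRpc(rpc, metadata=metadata)
+#   bidi_rpc.open()             # starts the call, registers done callback
+#   bidi_rpc.send(request)      # enqueues a request onto the stream
+#   response = bidi_rpc.recv()  # blocking read of the next response
+#   bidi_rpc.close()            # cancels the call, enqueues the None sentinel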
+class TestBidiRpc(object):
+ def test_initial_state(self):
+ bidi_rpc = bidi.BidiRpc(None)
+
+ assert bidi_rpc.is_active is False
+
+ def test_done_callbacks(self):
+ bidi_rpc = bidi.BidiRpc(None)
+ callback = mock.Mock(spec=["__call__"])
+
+ bidi_rpc.add_done_callback(callback)
+ bidi_rpc._on_call_done(mock.sentinel.future)
+
+ callback.assert_called_once_with(mock.sentinel.future)
+
+ def test_metadata(self):
+ rpc, call = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc, metadata=mock.sentinel.A)
+ assert bidi_rpc._rpc_metadata == mock.sentinel.A
+
+ bidi_rpc.open()
+ assert bidi_rpc.call == call
+ assert bidi_rpc.call.metadata == mock.sentinel.A
+
+ def test_open(self):
+ rpc, call = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+
+ bidi_rpc.open()
+
+ assert bidi_rpc.call == call
+ assert bidi_rpc.is_active
+ call.add_done_callback.assert_called_once_with(bidi_rpc._on_call_done)
+
+ def test_open_error_already_open(self):
+ rpc, _ = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+
+ bidi_rpc.open()
+
+ with pytest.raises(ValueError):
+ bidi_rpc.open()
+
+ def test_close(self):
+ rpc, call = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+ bidi_rpc.open()
+
+ bidi_rpc.close()
+
+ call.cancel.assert_called_once()
+ assert bidi_rpc.call == call
+ assert bidi_rpc.is_active is False
+ # ensure the request queue was signaled to stop.
+ assert bidi_rpc.pending_requests == 1
+ assert bidi_rpc._request_queue.get() is None
+
+ def test_close_no_rpc(self):
+ bidi_rpc = bidi.BidiRpc(None)
+ bidi_rpc.close()
+
+ def test_send(self):
+ rpc, call = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+ bidi_rpc.open()
+
+ bidi_rpc.send(mock.sentinel.request)
+
+ assert bidi_rpc.pending_requests == 1
+ assert bidi_rpc._request_queue.get() is mock.sentinel.request
+
+ def test_send_not_open(self):
+ rpc, call = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+
+ with pytest.raises(ValueError):
+ bidi_rpc.send(mock.sentinel.request)
+
+ def test_send_dead_rpc(self):
+ error = ValueError()
+ bidi_rpc = bidi.BidiRpc(None)
+ bidi_rpc.call = ClosedCall(error)
+
+ with pytest.raises(ValueError) as exc_info:
+ bidi_rpc.send(mock.sentinel.request)
+
+ assert exc_info.value == error
+
+ def test_recv(self):
+ bidi_rpc = bidi.BidiRpc(None)
+ bidi_rpc.call = iter([mock.sentinel.response])
+
+ response = bidi_rpc.recv()
+
+ assert response == mock.sentinel.response
+
+ def test_recv_not_open(self):
+ rpc, call = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+
+ with pytest.raises(ValueError):
+ bidi_rpc.recv()
+
+
+class CallStub(object):
+ def __init__(self, values, active=True):
+ self.values = iter(values)
+ self._is_active = active
+ self.cancelled = False
+
+ def __next__(self):
+ item = next(self.values)
+ if isinstance(item, Exception):
+ self._is_active = False
+ raise item
+ return item
+
+ def is_active(self):
+ return self._is_active
+
+ def add_done_callback(self, callback):
+ pass
+
+ def cancel(self):
+ self.cancelled = True
+
+
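+# ResumableBidiRpc layers recovery onto BidiRpc: when the call dies,
+# should_terminate(error) is consulted first, then should_recover(error);
+# if recovery is allowed, start_rpc is invoked again (rate-limited by a
+# _Throttle when throttle_reopen=True). A hedged sketch with an
+# illustrative recovery predicate:
+#
+#   bidi_rpc = bidi.ResumableBidiRpc(
+#       start_rpc,
+#       lambda error: isinstance(error, exceptions.ServiceUnavailable),
+#       throttle_reopen=True,
+#   )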
+class TestResumableBidiRpc(object):
+ def test_ctor_defaults(self):
+ start_rpc = mock.Mock()
+ should_recover = mock.Mock()
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+
+ assert bidi_rpc.is_active is False
+ assert bidi_rpc._finalized is False
+ assert bidi_rpc._start_rpc is start_rpc
+ assert bidi_rpc._should_recover is should_recover
+ assert bidi_rpc._should_terminate is bidi._never_terminate
+ assert bidi_rpc._initial_request is None
+ assert bidi_rpc._rpc_metadata is None
+ assert bidi_rpc._reopen_throttle is None
+
+ def test_ctor_explicit(self):
+ start_rpc = mock.Mock()
+ should_recover = mock.Mock()
+ should_terminate = mock.Mock()
+ initial_request = mock.Mock()
+ metadata = {"x-foo": "bar"}
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc,
+ should_recover,
+ should_terminate=should_terminate,
+ initial_request=initial_request,
+ metadata=metadata,
+ throttle_reopen=True,
+ )
+
+ assert bidi_rpc.is_active is False
+ assert bidi_rpc._finalized is False
+ assert bidi_rpc._should_recover is should_recover
+ assert bidi_rpc._should_terminate is should_terminate
+ assert bidi_rpc._initial_request is initial_request
+ assert bidi_rpc._rpc_metadata == metadata
+ assert isinstance(bidi_rpc._reopen_throttle, bidi._Throttle)
+
+ def test_done_callbacks_terminate(self):
+ cancellation = mock.Mock()
+ start_rpc = mock.Mock()
+ should_recover = mock.Mock(spec=["__call__"], return_value=True)
+ should_terminate = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc, should_recover, should_terminate=should_terminate
+ )
+ callback = mock.Mock(spec=["__call__"])
+
+ bidi_rpc.add_done_callback(callback)
+ bidi_rpc._on_call_done(cancellation)
+
+ should_terminate.assert_called_once_with(cancellation)
+ should_recover.assert_not_called()
+ callback.assert_called_once_with(cancellation)
+ assert not bidi_rpc.is_active
+
+ def test_done_callbacks_recoverable(self):
+ start_rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
+ should_recover = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+ callback = mock.Mock(spec=["__call__"])
+
+ bidi_rpc.add_done_callback(callback)
+ bidi_rpc._on_call_done(mock.sentinel.future)
+
+ callback.assert_not_called()
+ start_rpc.assert_called_once()
+ should_recover.assert_called_once_with(mock.sentinel.future)
+ assert bidi_rpc.is_active
+
+ def test_done_callbacks_non_recoverable(self):
+ start_rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+ callback = mock.Mock(spec=["__call__"])
+
+ bidi_rpc.add_done_callback(callback)
+ bidi_rpc._on_call_done(mock.sentinel.future)
+
+ callback.assert_called_once_with(mock.sentinel.future)
+ should_recover.assert_called_once_with(mock.sentinel.future)
+ assert not bidi_rpc.is_active
+
+ def test_send_terminate(self):
+ cancellation = ValueError()
+ call_1 = CallStub([cancellation], active=False)
+ call_2 = CallStub([])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+ should_terminate = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc, should_recover, should_terminate=should_terminate
+ )
+
+ bidi_rpc.open()
+
+ bidi_rpc.send(mock.sentinel.request)
+
+ assert bidi_rpc.pending_requests == 1
+ assert bidi_rpc._request_queue.get() is None
+
+ should_recover.assert_not_called()
+ should_terminate.assert_called_once_with(cancellation)
+ assert bidi_rpc.call == call_1
+ assert bidi_rpc.is_active is False
+ assert call_1.cancelled is True
+
+ def test_send_recover(self):
+ error = ValueError()
+ call_1 = CallStub([error], active=False)
+ call_2 = CallStub([])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+
+ bidi_rpc.open()
+
+ bidi_rpc.send(mock.sentinel.request)
+
+ assert bidi_rpc.pending_requests == 1
+ assert bidi_rpc._request_queue.get() is mock.sentinel.request
+
+ should_recover.assert_called_once_with(error)
+ assert bidi_rpc.call == call_2
+ assert bidi_rpc.is_active is True
+
+ def test_send_failure(self):
+ error = ValueError()
+ call = CallStub([error], active=False)
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, return_value=call
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+
+ bidi_rpc.open()
+
+ with pytest.raises(ValueError) as exc_info:
+ bidi_rpc.send(mock.sentinel.request)
+
+ assert exc_info.value == error
+ should_recover.assert_called_once_with(error)
+ assert bidi_rpc.call == call
+ assert bidi_rpc.is_active is False
+ assert call.cancelled is True
+ assert bidi_rpc.pending_requests == 1
+ assert bidi_rpc._request_queue.get() is None
+
+ def test_recv_terminate(self):
+ cancellation = ValueError()
+ call = CallStub([cancellation])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, return_value=call
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+ should_terminate = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc, should_recover, should_terminate=should_terminate
+ )
+
+ bidi_rpc.open()
+
+ bidi_rpc.recv()
+
+ should_recover.assert_not_called()
+ should_terminate.assert_called_once_with(cancellation)
+ assert bidi_rpc.call == call
+ assert bidi_rpc.is_active is False
+ assert call.cancelled is True
+
+ def test_recv_recover(self):
+ error = ValueError()
+ call_1 = CallStub([1, error])
+ call_2 = CallStub([2, 3])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+
+ bidi_rpc.open()
+
+ values = []
+ for n in range(3):
+ values.append(bidi_rpc.recv())
+
+ assert values == [1, 2, 3]
+ should_recover.assert_called_once_with(error)
+ assert bidi_rpc.call == call_2
+ assert bidi_rpc.is_active is True
+
+ def test_recv_recover_already_recovered(self):
+ call_1 = CallStub([])
+ call_2 = CallStub([])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+ )
+ callback = mock.Mock()
+ callback.return_value = True
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, callback)
+
+ bidi_rpc.open()
+
+ bidi_rpc._reopen()
+
+ assert bidi_rpc.call is call_1
+ assert bidi_rpc.is_active is True
+
+ def test_recv_failure(self):
+ error = ValueError()
+ call = CallStub([error])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, return_value=call
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+
+ bidi_rpc.open()
+
+ with pytest.raises(ValueError) as exc_info:
+ bidi_rpc.recv()
+
+ assert exc_info.value == error
+ should_recover.assert_called_once_with(error)
+ assert bidi_rpc.call == call
+ assert bidi_rpc.is_active is False
+ assert call.cancelled is True
+
+ def test_close(self):
+ call = mock.create_autospec(_CallAndFuture, instance=True)
+
+ def cancel_side_effect():
+ call.is_active.return_value = False
+
+ call.cancel.side_effect = cancel_side_effect
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, return_value=call
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+ bidi_rpc.open()
+
+ bidi_rpc.close()
+
+ should_recover.assert_not_called()
+ call.cancel.assert_called_once()
+ assert bidi_rpc.call == call
+ assert bidi_rpc.is_active is False
+ # ensure the request queue was signaled to stop.
+ assert bidi_rpc.pending_requests == 1
+ assert bidi_rpc._request_queue.get() is None
+ assert bidi_rpc._finalized
+
+ def test_reopen_failure_on_rpc_restart(self):
+ error1 = ValueError("1")
+ error2 = ValueError("2")
+ call = CallStub([error1])
+ # Invoking start RPC a second time will trigger an error.
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, side_effect=[call, error2]
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=True)
+ callback = mock.Mock(spec=["__call__"])
+
+ bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
+ bidi_rpc.add_done_callback(callback)
+
+ bidi_rpc.open()
+
+ with pytest.raises(ValueError) as exc_info:
+ bidi_rpc.recv()
+
+ assert exc_info.value == error2
+ should_recover.assert_called_once_with(error1)
+ assert bidi_rpc.call is None
+ assert bidi_rpc.is_active is False
+ callback.assert_called_once_with(error2)
+
+ def test_using_throttle_on_reopen_requests(self):
+ call = CallStub([])
+ start_rpc = mock.create_autospec(
+ grpc.StreamStreamMultiCallable, instance=True, return_value=call
+ )
+ should_recover = mock.Mock(spec=["__call__"], return_value=True)
+ bidi_rpc = bidi.ResumableBidiRpc(
+ start_rpc, should_recover, throttle_reopen=True
+ )
+
+ patcher = mock.patch.object(bidi_rpc._reopen_throttle.__class__, "__enter__")
+ with patcher as mock_enter:
+ bidi_rpc._reopen()
+
+ mock_enter.assert_called_once()
+
+ def test_send_not_open(self):
+ bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False)
+
+ with pytest.raises(ValueError):
+ bidi_rpc.send(mock.sentinel.request)
+
+ def test_recv_not_open(self):
+ bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False)
+
+ with pytest.raises(ValueError):
+ bidi_rpc.recv()
+
+ def test_finalize_idempotent(self):
+ error1 = ValueError("1")
+ error2 = ValueError("2")
+ callback = mock.Mock(spec=["__call__"])
+ should_recover = mock.Mock(spec=["__call__"], return_value=False)
+
+ bidi_rpc = bidi.ResumableBidiRpc(mock.sentinel.start_rpc, should_recover)
+
+ bidi_rpc.add_done_callback(callback)
+
+ bidi_rpc._on_call_done(error1)
+ bidi_rpc._on_call_done(error2)
+
+ callback.assert_called_once_with(error1)
+
+
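+# BackgroundConsumer runs bidi_rpc.recv() on a helper thread and hands
+# each response to the on_response callback; pause()/resume() gate
+# delivery and stop() closes the underlying RPC. A sketch of the
+# lifecycle exercised below:
+#
+#   consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+#   consumer.start()    # begin consuming on a background thread
+#   consumer.pause()    # hold delivery; the consumer stays active
+#   consumer.resume()
+#   consumer.stop()     # close the RPC and wind down the thread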
+class TestBackgroundConsumer(object):
+ def test_consume_once_then_exit(self):
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ bidi_rpc.recv.side_effect = [mock.sentinel.response_1]
+ recved = threading.Event()
+
+ def on_response(response):
+ assert response == mock.sentinel.response_1
+ bidi_rpc.is_active = False
+ recved.set()
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+
+ recved.wait()
+
+ bidi_rpc.recv.assert_called_once()
+ assert bidi_rpc.is_active is False
+
+ consumer.stop()
+
+ bidi_rpc.close.assert_called_once()
+ assert consumer.is_active is False
+
+ def test_pause_resume_and_close(self):
+ # This test is relatively complex. It attempts to start the consumer,
+ # consume one item, pause the consumer, check the state of the world,
+ # then resume the consumer. Doing this in a deterministic fashion
+ # requires a bit more mocking and patching than usual.
+
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+
+ def close_side_effect():
+ bidi_rpc.is_active = False
+
+ bidi_rpc.close.side_effect = close_side_effect
+
+ # These are used to coordinate the two threads to ensure deterministic
+ # execution.
+ should_continue = threading.Event()
+ responses_and_events = {
+ mock.sentinel.response_1: threading.Event(),
+ mock.sentinel.response_2: threading.Event(),
+ }
+ bidi_rpc.recv.side_effect = [mock.sentinel.response_1, mock.sentinel.response_2]
+
+ recved_responses = []
+ consumer = None
+
+ def on_response(response):
+ if response == mock.sentinel.response_1:
+ consumer.pause()
+
+ recved_responses.append(response)
+ responses_and_events[response].set()
+ should_continue.wait()
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+
+ # Wait for the first response to be recved.
+ responses_and_events[mock.sentinel.response_1].wait()
+
+ # Ensure only one item has been recved and that the consumer is paused.
+ assert recved_responses == [mock.sentinel.response_1]
+ assert consumer.is_paused is True
+ assert consumer.is_active is True
+
+ # Unpause the consumer, wait for the second item, then close the
+ # consumer.
+ should_continue.set()
+ consumer.resume()
+
+ responses_and_events[mock.sentinel.response_2].wait()
+
+ assert recved_responses == [mock.sentinel.response_1, mock.sentinel.response_2]
+
+ consumer.stop()
+
+ assert consumer.is_active is False
+
+ def test_wake_on_error(self):
+ should_continue = threading.Event()
+
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ bidi_rpc.add_done_callback.side_effect = lambda _: should_continue.set()
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, mock.sentinel.on_response)
+
+ # Start the consumer paused, which should immediately put it into wait
+ # state.
+ consumer.pause()
+ consumer.start()
+
+ # Wait for add_done_callback to be called
+ should_continue.wait()
+ bidi_rpc.add_done_callback.assert_called_once_with(consumer._on_call_done)
+
+ # The consumer should now be blocked on waiting to be unpaused.
+ assert consumer.is_active
+ assert consumer.is_paused
+
+ # Trigger the done callback, it should unpause the consumer and cause
+ # it to exit.
+ bidi_rpc.is_active = False
+ consumer._on_call_done(bidi_rpc)
+
+ # It may take a few cycles for the thread to exit.
+ while consumer.is_active:
+ pass
+
+ def test_consumer_expected_error(self, caplog):
+ caplog.set_level(logging.DEBUG)
+
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ bidi_rpc.recv.side_effect = exceptions.ServiceUnavailable("Gone away")
+
+ on_response = mock.Mock(spec=["__call__"])
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+
+ # Wait for the consumer's thread to exit.
+ while consumer.is_active:
+ pass
+
+ on_response.assert_not_called()
+ bidi_rpc.recv.assert_called_once()
+ assert "caught error" in caplog.text
+
+ def test_consumer_unexpected_error(self, caplog):
+ caplog.set_level(logging.DEBUG)
+
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ bidi_rpc.recv.side_effect = ValueError()
+
+ on_response = mock.Mock(spec=["__call__"])
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+
+ # Wait for the consumer's thread to exit.
+ while consumer.is_active:
+ pass # pragma: NO COVER (race condition)
+
+ on_response.assert_not_called()
+ bidi_rpc.recv.assert_called_once()
+ assert "caught unexpected exception" in caplog.text
+
+ def test_double_stop(self, caplog):
+ caplog.set_level(logging.DEBUG)
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ on_response = mock.Mock(spec=["__call__"])
+
+ def close_side_effect():
+ bidi_rpc.is_active = False
+
+ bidi_rpc.close.side_effect = close_side_effect
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+ assert consumer.is_active is True
+
+ consumer.stop()
+ assert consumer.is_active is False
+
+ # calling stop twice should not result in an error.
+ consumer.stop()
diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py
new file mode 100644
index 0000000..f5eebfb
--- /dev/null
+++ b/tests/unit/test_client_info.py
@@ -0,0 +1,98 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+try:
+ import grpc
+except ImportError:
+ grpc = None
+
+from google.api_core import client_info
+
+
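+# ClientInfo.to_user_agent() assembles the metrics string from whichever
+# versions are set, in the order the tests below pin down:
+#
+#   {user_agent} gl-python/{python} grpc/{grpc} (or rest/{rest})
+#       gax/{api_core} gapic/{gapic} gccl/{client_library}
+#
+# e.g. ClientInfo(python_version="1", api_core_version="2",
+# grpc_version=None).to_user_agent() == "gl-python/1 gax/2"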
+def test_constructor_defaults():
+ info = client_info.ClientInfo()
+
+ assert info.python_version is not None
+
+ if grpc is not None:
+ assert info.grpc_version is not None
+ else:
+ assert info.grpc_version is None
+
+ assert info.api_core_version is not None
+ assert info.gapic_version is None
+ assert info.client_library_version is None
+ assert info.rest_version is None
+
+
+def test_constructor_options():
+ info = client_info.ClientInfo(
+ python_version="1",
+ grpc_version="2",
+ api_core_version="3",
+ gapic_version="4",
+ client_library_version="5",
+ user_agent="6",
+ rest_version="7",
+ )
+
+ assert info.python_version == "1"
+ assert info.grpc_version == "2"
+ assert info.api_core_version == "3"
+ assert info.gapic_version == "4"
+ assert info.client_library_version == "5"
+ assert info.user_agent == "6"
+ assert info.rest_version == "7"
+
+
+def test_to_user_agent_minimal():
+ info = client_info.ClientInfo(
+ python_version="1", api_core_version="2", grpc_version=None
+ )
+
+ user_agent = info.to_user_agent()
+
+ assert user_agent == "gl-python/1 gax/2"
+
+
+def test_to_user_agent_full():
+ info = client_info.ClientInfo(
+ python_version="1",
+ grpc_version="2",
+ api_core_version="3",
+ gapic_version="4",
+ client_library_version="5",
+ user_agent="app-name/1.0",
+ )
+
+ user_agent = info.to_user_agent()
+
+ assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5"
+
+
+def test_to_user_agent_rest():
+ info = client_info.ClientInfo(
+ python_version="1",
+ grpc_version=None,
+ rest_version="2",
+ api_core_version="3",
+ gapic_version="4",
+ client_library_version="5",
+ user_agent="app-name/1.0",
+ )
+
+ user_agent = info.to_user_agent()
+
+ assert user_agent == "app-name/1.0 gl-python/1 rest/2 gax/3 gapic/4 gccl/5"
diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py
new file mode 100644
index 0000000..38b9ad0
--- /dev/null
+++ b/tests/unit/test_client_options.py
@@ -0,0 +1,117 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import client_options
+
+
+def get_client_cert():
+ return b"cert", b"key"
+
+
+def get_client_encrypted_cert():
+ return "cert_path", "key_path", b"passphrase"
+
+
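+# ClientOptions accepts either client_cert_source (a callable returning
+# (cert_bytes, key_bytes)) or client_encrypted_cert_source (a callable
+# returning (cert_path, key_path, passphrase)), but never both; combining
+# them raises ValueError, as test_constructor_with_both_cert_sources
+# verifies below. For example:
+#
+#   options = client_options.ClientOptions(
+#       api_endpoint="foo.googleapis.com", client_cert_source=get_client_cert
+#   )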
+def test_constructor():
+
+ options = client_options.ClientOptions(
+ api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert,
+ quota_project_id="quote-proj",
+ credentials_file="path/to/credentials.json",
+ scopes=[
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ],
+ )
+
+ assert options.api_endpoint == "foo.googleapis.com"
+ assert options.client_cert_source() == (b"cert", b"key")
+ assert options.quota_project_id == "quote-proj"
+ assert options.credentials_file == "path/to/credentials.json"
+ assert options.scopes == [
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ]
+
+
+def test_constructor_with_encrypted_cert_source():
+
+ options = client_options.ClientOptions(
+ api_endpoint="foo.googleapis.com",
+ client_encrypted_cert_source=get_client_encrypted_cert,
+ )
+
+ assert options.api_endpoint == "foo.googleapis.com"
+ assert options.client_encrypted_cert_source() == (
+ "cert_path",
+ "key_path",
+ b"passphrase",
+ )
+
+
+def test_constructor_with_both_cert_sources():
+ with pytest.raises(ValueError):
+ client_options.ClientOptions(
+ api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert,
+ client_encrypted_cert_source=get_client_encrypted_cert,
+ )
+
+
+def test_from_dict():
+ options = client_options.from_dict(
+ {
+ "api_endpoint": "foo.googleapis.com",
+ "client_cert_source": get_client_cert,
+ "quota_project_id": "quote-proj",
+ "credentials_file": "path/to/credentials.json",
+ "scopes": [
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ],
+ }
+ )
+
+ assert options.api_endpoint == "foo.googleapis.com"
+ assert options.client_cert_source() == (b"cert", b"key")
+ assert options.quota_project_id == "quote-proj"
+ assert options.credentials_file == "path/to/credentials.json"
+ assert options.scopes == [
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ]
+
+
+def test_from_dict_bad_argument():
+ with pytest.raises(ValueError):
+ client_options.from_dict(
+ {
+ "api_endpoint": "foo.googleapis.com",
+ "bad_arg": "1234",
+ "client_cert_source": get_client_cert,
+ }
+ )
+
+
+def test_repr():
+ options = client_options.ClientOptions(api_endpoint="foo.googleapis.com")
+
+ # Dict ordering in the repr is not guaranteed, so accept either order
+ # of the same three entries.
+ assert repr(options) in (
+ "ClientOptions: {'api_endpoint': 'foo.googleapis.com', 'client_cert_source': None, 'client_encrypted_cert_source': None}",
+ "ClientOptions: {'client_encrypted_cert_source': None, 'client_cert_source': None, 'api_endpoint': 'foo.googleapis.com'}",
+ )
diff --git a/tests/unit/test_datetime_helpers.py b/tests/unit/test_datetime_helpers.py
new file mode 100644
index 0000000..5f5470a
--- /dev/null
+++ b/tests/unit/test_datetime_helpers.py
@@ -0,0 +1,396 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import calendar
+import datetime
+
+import pytest
+
+from google.api_core import datetime_helpers
+from google.protobuf import timestamp_pb2
+
+
+ONE_MINUTE_IN_MICROSECONDS = 60 * 1e6
+
+
+def test_utcnow():
+ result = datetime_helpers.utcnow()
+ assert isinstance(result, datetime.datetime)
+
+
+def test_to_milliseconds():
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=datetime.timezone.utc)
+ assert datetime_helpers.to_milliseconds(dt) == 1000
+
+
+def test_to_microseconds():
+ microseconds = 314159
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 0, microsecond=microseconds)
+ assert datetime_helpers.to_microseconds(dt) == microseconds
+
+
+def test_to_microseconds_non_utc():
+ zone = datetime.timezone(datetime.timedelta(minutes=-1))
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=zone)
+ assert datetime_helpers.to_microseconds(dt) == ONE_MINUTE_IN_MICROSECONDS
+
+
+def test_to_microseconds_naive():
+ microseconds = 314159
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 0, microsecond=microseconds, tzinfo=None)
+ assert datetime_helpers.to_microseconds(dt) == microseconds
+
+
+def test_from_microseconds():
+ five_mins_from_epoch_in_microseconds = 5 * ONE_MINUTE_IN_MICROSECONDS
+ five_mins_from_epoch_datetime = datetime.datetime(
+ 1970, 1, 1, 0, 5, 0, tzinfo=datetime.timezone.utc
+ )
+
+ result = datetime_helpers.from_microseconds(five_mins_from_epoch_in_microseconds)
+
+ assert result == five_mins_from_epoch_datetime
+
+
+def test_from_iso8601_date():
+ today = datetime.date.today()
+ iso_8601_today = today.strftime("%Y-%m-%d")
+
+ assert datetime_helpers.from_iso8601_date(iso_8601_today) == today
+
+
+def test_from_iso8601_time():
+ assert datetime_helpers.from_iso8601_time("12:09:42") == datetime.time(12, 9, 42)
+
+
+def test_from_rfc3339():
+ value = "2009-12-17T12:44:32.123456Z"
+ assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
+ 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
+ )
+
+
+def test_from_rfc3339_nanos():
+ value = "2009-12-17T12:44:32.123456Z"
+ assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
+ 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
+ )
+
+
+def test_from_rfc3339_without_nanos():
+ value = "2009-12-17T12:44:32Z"
+ assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
+ 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
+ )
+
+
+def test_from_rfc3339_nanos_without_nanos():
+ value = "2009-12-17T12:44:32Z"
+ assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
+ 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
+ )
+
+
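+# RFC 3339 fractions may carry up to nine digits, but datetime stores
+# only microseconds, so from_rfc3339 truncates (never rounds) everything
+# past the sixth digit: ".12345678" parses to microsecond=123456, as the
+# cases below enumerate.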
+@pytest.mark.parametrize(
+ "truncated, micros",
+ [
+ ("12345678", 123456),
+ ("1234567", 123456),
+ ("123456", 123456),
+ ("12345", 123450),
+ ("1234", 123400),
+ ("123", 123000),
+ ("12", 120000),
+ ("1", 100000),
+ ],
+)
+def test_from_rfc3339_with_truncated_nanos(truncated, micros):
+ value = "2009-12-17T12:44:32.{}Z".format(truncated)
+ assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
+ 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
+ )
+
+
+def test_from_rfc3339_nanos_is_deprecated():
+ value = "2009-12-17T12:44:32.123456Z"
+
+ result = datetime_helpers.from_rfc3339(value)
+ result_nanos = datetime_helpers.from_rfc3339_nanos(value)
+
+ assert result == result_nanos
+
+
+@pytest.mark.parametrize(
+ "truncated, micros",
+ [
+ ("12345678", 123456),
+ ("1234567", 123456),
+ ("123456", 123456),
+ ("12345", 123450),
+ ("1234", 123400),
+ ("123", 123000),
+ ("12", 120000),
+ ("1", 100000),
+ ],
+)
+def test_from_rfc3339_nanos_with_truncated_nanos(truncated, micros):
+ value = "2009-12-17T12:44:32.{}Z".format(truncated)
+ assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
+ 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
+ )
+
+
+def test_from_rfc3339_wo_nanos_raise_exception():
+ value = "2009-12-17T12:44:32"
+ with pytest.raises(ValueError):
+ datetime_helpers.from_rfc3339(value)
+
+
+def test_from_rfc3339_w_nanos_raise_exception():
+ value = "2009-12-17T12:44:32.123456"
+ with pytest.raises(ValueError):
+ datetime_helpers.from_rfc3339(value)
+
+
+def test_to_rfc3339():
+ value = datetime.datetime(2016, 4, 5, 13, 30, 0)
+ expected = "2016-04-05T13:30:00.000000Z"
+ assert datetime_helpers.to_rfc3339(value) == expected
+
+
+def test_to_rfc3339_with_utc():
+ value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=datetime.timezone.utc)
+ expected = "2016-04-05T13:30:00.000000Z"
+ assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
+
+
+def test_to_rfc3339_with_non_utc():
+ zone = datetime.timezone(datetime.timedelta(minutes=-60))
+ value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
+ expected = "2016-04-05T14:30:00.000000Z"
+ assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
+
+
+def test_to_rfc3339_with_non_utc_ignore_zone():
+ zone = datetime.timezone(datetime.timedelta(minutes=-60))
+ value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
+ expected = "2016-04-05T13:30:00.000000Z"
+ assert datetime_helpers.to_rfc3339(value, ignore_zone=True) == expected
+
+
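+# DatetimeWithNanoseconds subclasses datetime.datetime and adds a
+# nanosecond field; microsecond is derived from it, and supplying both
+# microsecond and nanosecond raises TypeError. A sketch from the cases
+# below:
+#
+#   stamp = datetime_helpers.DatetimeWithNanoseconds(
+#       2016, 12, 20, 21, 13, 47, nanosecond=123456789
+#   )
+#   stamp.microsecond  # 123456
+#   stamp.rfc3339()    # "2016-12-20T21:13:47.123456789Z"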
+class Test_DateTimeWithNanos(object):
+ @staticmethod
+ def test_ctor_wo_nanos():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, 123456
+ )
+ assert stamp.year == 2016
+ assert stamp.month == 12
+ assert stamp.day == 20
+ assert stamp.hour == 21
+ assert stamp.minute == 13
+ assert stamp.second == 47
+ assert stamp.microsecond == 123456
+ assert stamp.nanosecond == 0
+
+ @staticmethod
+ def test_ctor_w_nanos():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789
+ )
+ assert stamp.year == 2016
+ assert stamp.month == 12
+ assert stamp.day == 20
+ assert stamp.hour == 21
+ assert stamp.minute == 13
+ assert stamp.second == 47
+ assert stamp.microsecond == 123456
+ assert stamp.nanosecond == 123456789
+
+ @staticmethod
+ def test_ctor_w_micros_positional_and_nanos():
+ with pytest.raises(TypeError):
+ datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, 123456, nanosecond=123456789
+ )
+
+ @staticmethod
+ def test_ctor_w_micros_keyword_and_nanos():
+ with pytest.raises(TypeError):
+ datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, microsecond=123456, nanosecond=123456789
+ )
+
+ @staticmethod
+ def test_rfc3339_wo_nanos():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, 123456
+ )
+ assert stamp.rfc3339() == "2016-12-20T21:13:47.123456Z"
+
+ @staticmethod
+ def test_rfc3339_wo_nanos_w_leading_zero():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(2016, 12, 20, 21, 13, 47, 1234)
+ assert stamp.rfc3339() == "2016-12-20T21:13:47.001234Z"
+
+ @staticmethod
+ def test_rfc3339_w_nanos():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789
+ )
+ assert stamp.rfc3339() == "2016-12-20T21:13:47.123456789Z"
+
+ @staticmethod
+ def test_rfc3339_w_nanos_w_leading_zero():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=1234567
+ )
+ assert stamp.rfc3339() == "2016-12-20T21:13:47.001234567Z"
+
+ @staticmethod
+ def test_rfc3339_w_nanos_no_trailing_zeroes():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=100000000
+ )
+ assert stamp.rfc3339() == "2016-12-20T21:13:47.1Z"
+
+ @staticmethod
+ def test_rfc3339_w_nanos_w_leading_zero_and_no_trailing_zeros():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=1234500
+ )
+ assert stamp.rfc3339() == "2016-12-20T21:13:47.0012345Z"
+
+ @staticmethod
+ def test_from_rfc3339_w_invalid():
+ stamp = "2016-12-20T21:13:47"
+ with pytest.raises(ValueError):
+ datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(stamp)
+
+ @staticmethod
+ def test_from_rfc3339_wo_fraction():
+ timestamp = "2016-12-20T21:13:47Z"
+ expected = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, tzinfo=datetime.timezone.utc
+ )
+ stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
+ assert stamp == expected
+
+ @staticmethod
+ def test_from_rfc3339_w_partial_precision():
+ timestamp = "2016-12-20T21:13:47.1Z"
+ expected = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=datetime.timezone.utc
+ )
+ stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
+ assert stamp == expected
+
+ @staticmethod
+ def test_from_rfc3339_w_full_precision():
+ timestamp = "2016-12-20T21:13:47.123456789Z"
+ expected = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
+ )
+ stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
+ assert stamp == expected
+
+ @staticmethod
+ @pytest.mark.parametrize(
+ "fractional, nanos",
+ [
+ ("12345678", 123456780),
+ ("1234567", 123456700),
+ ("123456", 123456000),
+ ("12345", 123450000),
+ ("1234", 123400000),
+ ("123", 123000000),
+ ("12", 120000000),
+ ("1", 100000000),
+ ],
+ )
+ def test_from_rfc3339_test_nanoseconds(fractional, nanos):
+ value = "2009-12-17T12:44:32.{}Z".format(fractional)
+ assert (
+ datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(value).nanosecond
+ == nanos
+ )
+
+ @staticmethod
+ def test_timestamp_pb_wo_nanos_naive():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, 123456
+ )
+ delta = (
+ stamp.replace(tzinfo=datetime.timezone.utc) - datetime_helpers._UTC_EPOCH
+ )
+ seconds = int(delta.total_seconds())
+ nanos = 123456000
+ timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+ assert stamp.timestamp_pb() == timestamp
+
+ @staticmethod
+ def test_timestamp_pb_w_nanos():
+ stamp = datetime_helpers.DatetimeWithNanoseconds(
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
+ )
+ delta = stamp - datetime_helpers._UTC_EPOCH
+ timestamp = timestamp_pb2.Timestamp(
+ seconds=int(delta.total_seconds()), nanos=123456789
+ )
+ assert stamp.timestamp_pb() == timestamp
+
+ @staticmethod
+ def test_from_timestamp_pb_wo_nanos():
+ when = datetime.datetime(
+ 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
+ )
+ delta = when - datetime_helpers._UTC_EPOCH
+ seconds = int(delta.total_seconds())
+ timestamp = timestamp_pb2.Timestamp(seconds=seconds)
+
+ stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(timestamp)
+
+ assert _to_seconds(when) == _to_seconds(stamp)
+ assert stamp.microsecond == 0
+ assert stamp.nanosecond == 0
+ assert stamp.tzinfo == datetime.timezone.utc
+
+ @staticmethod
+ def test_from_timestamp_pb_w_nanos():
+ when = datetime.datetime(
+ 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
+ )
+ delta = when - datetime_helpers._UTC_EPOCH
+ seconds = int(delta.total_seconds())
+ timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=123456789)
+
+ stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(timestamp)
+
+ assert _to_seconds(when) == _to_seconds(stamp)
+ assert stamp.microsecond == 123456
+ assert stamp.nanosecond == 123456789
+ assert stamp.tzinfo == datetime.timezone.utc
+
+
+def _to_seconds(value):
+ """Convert a datetime to seconds since the unix epoch.
+
+ Args:
+ value (datetime.datetime): The datetime to convert.
+
+ Returns:
+ int: Seconds since the unix epoch.
+ """
+ assert value.tzinfo is datetime.timezone.utc
+ return calendar.timegm(value.timetuple())
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
new file mode 100644
index 0000000..622f58a
--- /dev/null
+++ b/tests/unit/test_exceptions.py
@@ -0,0 +1,353 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import http.client
+import json
+
+import mock
+import pytest
+import requests
+
+try:
+ import grpc
+ from grpc_status import rpc_status
+except ImportError:
+ grpc = rpc_status = None
+
+from google.api_core import exceptions
+from google.protobuf import any_pb2, json_format
+from google.rpc import error_details_pb2, status_pb2
+
+
+def test_create_google_cloud_error():
+ exception = exceptions.GoogleAPICallError("Testing")
+ exception.code = 600
+ assert str(exception) == "600 Testing"
+ assert exception.message == "Testing"
+ assert exception.errors == []
+ assert exception.response is None
+
+
+def test_create_google_cloud_error_with_args():
+ error = {
+ "code": 600,
+ "message": "Testing",
+ }
+ response = mock.sentinel.response
+ exception = exceptions.GoogleAPICallError("Testing", [error], response=response)
+ exception.code = 600
+ assert str(exception) == "600 Testing"
+ assert exception.message == "Testing"
+ assert exception.errors == [error]
+ assert exception.response == response
+
+
+def test_from_http_status():
+ message = "message"
+ exception = exceptions.from_http_status(http.client.NOT_FOUND, message)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == message
+ assert exception.errors == []
+
+
+def test_from_http_status_with_errors_and_response():
+ message = "message"
+ errors = ["1", "2"]
+ response = mock.sentinel.response
+ exception = exceptions.from_http_status(
+ http.client.NOT_FOUND, message, errors=errors, response=response
+ )
+
+ assert isinstance(exception, exceptions.NotFound)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == message
+ assert exception.errors == errors
+ assert exception.response == response
+
+
+def test_from_http_status_unknown_code():
+ message = "message"
+ status_code = 156
+ exception = exceptions.from_http_status(status_code, message)
+ assert exception.code == status_code
+ assert exception.message == message
+
+
+def make_response(content):
+ response = requests.Response()
+ response._content = content
+ response.status_code = http.client.NOT_FOUND
+ response.request = requests.Request(
+ method="POST", url="https://example.com"
+ ).prepare()
+ return response
+
+
+def test_from_http_response_no_content():
+ response = make_response(None)
+
+ exception = exceptions.from_http_response(response)
+
+ assert isinstance(exception, exceptions.NotFound)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: unknown error"
+ assert exception.response == response
+
+
+def test_from_http_response_text_content():
+ response = make_response(b"message")
+ response.encoding = "UTF8" # suppress charset_normalizer warning
+
+ exception = exceptions.from_http_response(response)
+
+ assert isinstance(exception, exceptions.NotFound)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: message"
+
+
+def test_from_http_response_json_content():
+ response = make_response(
+ json.dumps({"error": {"message": "json message", "errors": ["1", "2"]}}).encode(
+ "utf-8"
+ )
+ )
+
+ exception = exceptions.from_http_response(response)
+
+ assert isinstance(exception, exceptions.NotFound)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: json message"
+ assert exception.errors == ["1", "2"]
+
+
+def test_from_http_response_bad_json_content():
+ response = make_response(json.dumps({"meep": "moop"}).encode("utf-8"))
+
+ exception = exceptions.from_http_response(response)
+
+ assert isinstance(exception, exceptions.NotFound)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: unknown error"
+
+
+def test_from_http_response_json_unicode_content():
+ response = make_response(
+ json.dumps(
+ {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
+ ).encode("utf-8")
+ )
+
+ exception = exceptions.from_http_response(response)
+
+ assert isinstance(exception, exceptions.NotFound)
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: \u2019 message"
+ assert exception.errors == ["1", "2"]
+
+
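+# from_grpc_status maps grpc.StatusCode values (or their integer
+# equivalents) onto the same exception hierarchy as the HTTP helpers:
+# OUT_OF_RANGE (11) becomes exceptions.OutOfRange, which is also an
+# exceptions.BadRequest carrying http.client.BAD_REQUEST. The tests below
+# cover both spellings.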
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_status():
+ message = "message"
+ exception = exceptions.from_grpc_status(grpc.StatusCode.OUT_OF_RANGE, message)
+ assert isinstance(exception, exceptions.BadRequest)
+ assert isinstance(exception, exceptions.OutOfRange)
+ assert exception.code == http.client.BAD_REQUEST
+ assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
+ assert exception.message == message
+ assert exception.errors == []
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_status_as_int():
+ message = "message"
+ exception = exceptions.from_grpc_status(11, message)
+ assert isinstance(exception, exceptions.BadRequest)
+ assert isinstance(exception, exceptions.OutOfRange)
+ assert exception.code == http.client.BAD_REQUEST
+ assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
+ assert exception.message == message
+ assert exception.errors == []
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_status_with_errors_and_response():
+ message = "message"
+ response = mock.sentinel.response
+ errors = ["1", "2"]
+ exception = exceptions.from_grpc_status(
+ grpc.StatusCode.OUT_OF_RANGE, message, errors=errors, response=response
+ )
+
+ assert isinstance(exception, exceptions.OutOfRange)
+ assert exception.message == message
+ assert exception.errors == errors
+ assert exception.response == response
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_status_unknown_code():
+ message = "message"
+ exception = exceptions.from_grpc_status(grpc.StatusCode.OK, message)
+ assert exception.grpc_status_code == grpc.StatusCode.OK
+ assert exception.message == message
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_error():
+ message = "message"
+ error = mock.create_autospec(grpc.Call, instance=True)
+ error.code.return_value = grpc.StatusCode.INVALID_ARGUMENT
+ error.details.return_value = message
+
+ exception = exceptions.from_grpc_error(error)
+
+ assert isinstance(exception, exceptions.BadRequest)
+ assert isinstance(exception, exceptions.InvalidArgument)
+ assert exception.code == http.client.BAD_REQUEST
+ assert exception.grpc_status_code == grpc.StatusCode.INVALID_ARGUMENT
+ assert exception.message == message
+ assert exception.errors == [error]
+ assert exception.response == error
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_error_non_call():
+ message = "message"
+ error = mock.create_autospec(grpc.RpcError, instance=True)
+ error.__str__.return_value = message
+
+ exception = exceptions.from_grpc_error(error)
+
+ assert isinstance(exception, exceptions.GoogleAPICallError)
+ assert exception.code is None
+ assert exception.grpc_status_code is None
+ assert exception.message == message
+ assert exception.errors == [error]
+ assert exception.response == error
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_error_bare_call():
+ message = "Testing"
+
+ class TestingError(grpc.Call, grpc.RpcError):
+ def __init__(self, exception):
+ self.exception = exception
+
+ def code(self):
+ return self.exception.grpc_status_code
+
+ def details(self):
+ return message
+
+ nested_message = "message"
+ error = TestingError(exceptions.GoogleAPICallError(nested_message))
+
+ exception = exceptions.from_grpc_error(error)
+
+ assert isinstance(exception, exceptions.GoogleAPICallError)
+ assert exception.code is None
+ assert exception.grpc_status_code is None
+ assert exception.message == message
+ assert exception.errors == [error]
+ assert exception.response == error
+ assert exception.details == []
+
+
+def create_bad_request_details():
+ bad_request_details = error_details_pb2.BadRequest()
+ field_violation = bad_request_details.field_violations.add()
+ field_violation.field = "document.content"
+ field_violation.description = "Must have some text content to annotate."
+ status_detail = any_pb2.Any()
+ status_detail.Pack(bad_request_details)
+ return status_detail
+
+
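+# Structured error details travel as google.protobuf.Any messages packed
+# into google.rpc.Status.details (see the JSON mapping at
+# https://cloud.google.com/apis/design/errors#http_mapping). The helper
+# above packs a BadRequest detail; the tests below check that
+# from_http_response and from_grpc_error surface it on exception.details.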
+def test_error_details_from_rest_response():
+ bad_request_detail = create_bad_request_details()
+ status = status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status.details.append(bad_request_detail)
+
+ # See JSON schema in https://cloud.google.com/apis/design/errors#http_mapping
+ http_response = make_response(
+ json.dumps({"error": json.loads(json_format.MessageToJson(status))}).encode(
+ "utf-8"
+ )
+ )
+ exception = exceptions.from_http_response(http_response)
+ want_error_details = [json.loads(json_format.MessageToJson(bad_request_detail))]
+ assert want_error_details == exception.details
+ # 404 POST comes from make_response.
+ assert str(exception) == (
+ "404 POST https://example.com/: 3 INVALID_ARGUMENT:"
+ " One of content, or gcs_content_uri must be set."
+ " [{'@type': 'type.googleapis.com/google.rpc.BadRequest',"
+ " 'fieldViolations': [{'field': 'document.content',"
+ " 'description': 'Must have some text content to annotate.'}]}]"
+ )
+
+
+def test_error_details_from_v1_rest_response():
+ response = make_response(
+ json.dumps(
+ {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
+ ).encode("utf-8")
+ )
+ exception = exceptions.from_http_response(response)
+ assert exception.details == []
+
+
+@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
+def test_error_details_from_grpc_response():
+ status = rpc_status.status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status_detail = create_bad_request_details()
+ status.details.append(status_detail)
+
+ # The actual error doesn't matter as long as it's a grpc.Call,
+ # because from_call is mocked.
+ error = mock.create_autospec(grpc.Call, instance=True)
+ with mock.patch("grpc_status.rpc_status.from_call") as m:
+ m.return_value = status
+ exception = exceptions.from_grpc_error(error)
+
+ bad_request_detail = error_details_pb2.BadRequest()
+ status_detail.Unpack(bad_request_detail)
+ assert exception.details == [bad_request_detail]
+
+
+@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
+def test_error_details_from_grpc_response_unknown_error():
+ status_detail = any_pb2.Any()
+
+ status = rpc_status.status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status.details.append(status_detail)
+
+ error = mock.create_autospec(grpc.Call, instance=True)
+ with mock.patch("grpc_status.rpc_status.from_call") as m:
+ m.return_value = status
+ exception = exceptions.from_grpc_error(error)
+ assert exception.details == [status_detail]
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
new file mode 100644
index 0000000..ca969e4
--- /dev/null
+++ b/tests/unit/test_grpc_helpers.py
@@ -0,0 +1,860 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import pytest
+
+try:
+ import grpc
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core import grpc_helpers
+import google.auth.credentials
+from google.longrunning import operations_pb2
+
+
+def test__patch_callable_name():
+ callable = mock.Mock(spec=["__class__"])
+ callable.__class__ = mock.Mock(spec=["__name__"])
+ callable.__class__.__name__ = "TestCallable"
+
+ grpc_helpers._patch_callable_name(callable)
+
+ assert callable.__name__ == "TestCallable"
+
+
+def test__patch_callable_name_no_op():
+ callable = mock.Mock(spec=["__name__"])
+ callable.__name__ = "test_callable"
+
+ grpc_helpers._patch_callable_name(callable)
+
+ assert callable.__name__ == "test_callable"
+
+
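+# A minimal fake implementing both grpc.RpcError and grpc.Call, so the
+# error-mapping helpers can read code() and details() directly off the
+# raised exception.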
+class RpcErrorImpl(grpc.RpcError, grpc.Call):
+ def __init__(self, code):
+ super(RpcErrorImpl, self).__init__()
+ self._code = code
+
+ def code(self):
+ return self._code
+
+ def details(self):
+ return None
+
+ def trailing_metadata(self):
+ return None
+
+
+def test_wrap_unary_errors():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+ callable_ = mock.Mock(spec=["__call__"], side_effect=grpc_error)
+
+ wrapped_callable = grpc_helpers._wrap_unary_errors(callable_)
+
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ wrapped_callable(1, 2, three="four")
+
+ callable_.assert_called_once_with(1, 2, three="four")
+ assert exc_info.value.response == grpc_error
+
+
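+# _StreamingResponseIterator prefetches the first result by default, so a
+# server-side error can surface when the RPC is invoked rather than on the
+# first iteration; the tests below exercise both prefetch modes.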
+class Test_StreamingResponseIterator:
+ @staticmethod
+ def _make_wrapped(*items):
+ return iter(items)
+
+ @staticmethod
+ def _make_one(wrapped, **kw):
+ return grpc_helpers._StreamingResponseIterator(wrapped, **kw)
+
+ def test_ctor_defaults(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert iterator._stored_first_result == "a"
+ assert list(wrapped) == ["b", "c"]
+
+ def test_ctor_explicit(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert getattr(iterator, "_stored_first_result", self) is self
+ assert list(wrapped) == ["a", "b", "c"]
+
+ def test_ctor_w_rpc_error_on_prefetch(self):
+ wrapped = mock.MagicMock()
+ wrapped.__next__.side_effect = grpc.RpcError()
+
+ with pytest.raises(grpc.RpcError):
+ self._make_one(wrapped)
+
+ def test___iter__(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert iter(iterator) is iterator
+
+ def test___next___w_cached_first_result(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert next(iterator) == "a"
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert next(iterator) == "b"
+ assert next(iterator) == "c"
+
+ def test___next___wo_cached_first_result(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert next(iterator) == "a"
+ assert next(iterator) == "b"
+ assert next(iterator) == "c"
+
+ def test___next___w_rpc_error(self):
+ wrapped = mock.MagicMock()
+ wrapped.__next__.side_effect = grpc.RpcError()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ with pytest.raises(exceptions.GoogleAPICallError):
+ next(iterator)
+
+ def test_add_callback(self):
+ wrapped = mock.MagicMock()
+ callback = mock.Mock(spec={})
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.add_callback(callback) is wrapped.add_callback.return_value
+
+ wrapped.add_callback.assert_called_once_with(callback)
+
+ def test_cancel(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.cancel() is wrapped.cancel.return_value
+
+ wrapped.cancel.assert_called_once_with()
+
+ def test_code(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.code() is wrapped.code.return_value
+
+ wrapped.code.assert_called_once_with()
+
+ def test_details(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.details() is wrapped.details.return_value
+
+ wrapped.details.assert_called_once_with()
+
+ def test_initial_metadata(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.initial_metadata() is wrapped.initial_metadata.return_value
+
+ wrapped.initial_metadata.assert_called_once_with()
+
+ def test_is_active(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.is_active() is wrapped.is_active.return_value
+
+ wrapped.is_active.assert_called_once_with()
+
+ def test_time_remaining(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.time_remaining() is wrapped.time_remaining.return_value
+
+ wrapped.time_remaining.assert_called_once_with()
+
+ def test_trailing_metadata(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.trailing_metadata() is wrapped.trailing_metadata.return_value
+
+ wrapped.trailing_metadata.assert_called_once_with()
+
+
+def test_wrap_stream_okay():
+ expected_responses = [1, 2, 3]
+ callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+
+ got_iterator = wrapped_callable(1, 2, three="four")
+
+ responses = list(got_iterator)
+
+ callable_.assert_called_once_with(1, 2, three="four")
+ assert responses == expected_responses
+
+
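+# The wrapper honors an opt-out attribute on the callable: when
+# _prefetch_first_result_ is False, no item is consumed eagerly.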
+def test_wrap_stream_prefetch_disabled():
+ responses = [1, 2, 3]
+ iter_responses = iter(responses)
+ callable_ = mock.Mock(spec=["__call__"], return_value=iter_responses)
+ callable_._prefetch_first_result_ = False
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+ wrapped_callable(1, 2, three="four")
+
+ assert list(iter_responses) == responses # no items should have been pre-fetched
+ callable_.assert_called_once_with(1, 2, three="four")
+
+
+def test_wrap_stream_iterable_interface():
+ response_iter = mock.create_autospec(grpc.Call, instance=True)
+ callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+
+ got_iterator = wrapped_callable()
+
+ callable_.assert_called_once_with()
+
+ # Check each aliased method in the grpc.Call interface
+ got_iterator.add_callback(mock.sentinel.callback)
+ response_iter.add_callback.assert_called_once_with(mock.sentinel.callback)
+
+ got_iterator.cancel()
+ response_iter.cancel.assert_called_once_with()
+
+ got_iterator.code()
+ response_iter.code.assert_called_once_with()
+
+ got_iterator.details()
+ response_iter.details.assert_called_once_with()
+
+ got_iterator.initial_metadata()
+ response_iter.initial_metadata.assert_called_once_with()
+
+ got_iterator.is_active()
+ response_iter.is_active.assert_called_once_with()
+
+ got_iterator.time_remaining()
+ response_iter.time_remaining.assert_called_once_with()
+
+ got_iterator.trailing_metadata()
+ response_iter.trailing_metadata.assert_called_once_with()
+
+
+def test_wrap_stream_errors_invocation():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
+ callable_ = mock.Mock(spec=["__call__"], side_effect=grpc_error)
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+
+ with pytest.raises(exceptions.InvalidArgument) as exc_info:
+ wrapped_callable(1, 2, three="four")
+
+ callable_.assert_called_once_with(1, 2, three="four")
+ assert exc_info.value.response == grpc_error
+
+
+def test_wrap_stream_empty_iterator():
+ expected_responses = []
+ callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+
+ got_iterator = wrapped_callable()
+
+ responses = list(got_iterator)
+
+ callable_.assert_called_once_with()
+ assert responses == expected_responses
+
+
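+# Simulates a response stream that yields items in order until it reaches a
+# stored RpcError, which it raises instead of returning.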
+class RpcResponseIteratorImpl(object):
+ def __init__(self, iterable):
+ self._iterable = iter(iterable)
+
+ def next(self):
+ next_item = next(self._iterable)
+ if isinstance(next_item, RpcErrorImpl):
+ raise next_item
+ return next_item
+
+ __next__ = next
+
+
+def test_wrap_stream_errors_iterator_initialization():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.UNAVAILABLE)
+ response_iter = RpcResponseIteratorImpl([grpc_error])
+ callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+
+ with pytest.raises(exceptions.ServiceUnavailable) as exc_info:
+ wrapped_callable(1, 2, three="four")
+
+ callable_.assert_called_once_with(1, 2, three="four")
+ assert exc_info.value.response == grpc_error
+
+
+def test_wrap_stream_errors_during_iteration():
+ grpc_error = RpcErrorImpl(grpc.StatusCode.UNAVAILABLE)
+ response_iter = RpcResponseIteratorImpl([1, grpc_error])
+ callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
+
+ wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
+ got_iterator = wrapped_callable(1, 2, three="four")
+ next(got_iterator)
+
+ with pytest.raises(exceptions.ServiceUnavailable) as exc_info:
+ next(got_iterator)
+
+ callable_.assert_called_once_with(1, 2, three="four")
+ assert exc_info.value.response == grpc_error
+
+
+@mock.patch("google.api_core.grpc_helpers._wrap_unary_errors")
+def test_wrap_errors_non_streaming(wrap_unary_errors):
+ callable_ = mock.create_autospec(grpc.UnaryUnaryMultiCallable)
+
+ result = grpc_helpers.wrap_errors(callable_)
+
+ assert result == wrap_unary_errors.return_value
+ wrap_unary_errors.assert_called_once_with(callable_)
+
+
+@mock.patch("google.api_core.grpc_helpers._wrap_stream_errors")
+def test_wrap_errors_streaming(wrap_stream_errors):
+ callable_ = mock.create_autospec(grpc.UnaryStreamMultiCallable)
+
+ result = grpc_helpers.wrap_errors(callable_)
+
+ assert result == wrap_stream_errors.return_value
+ wrap_stream_errors.assert_called_once_with(callable_)
+
+
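+# The create_channel tests below patch out auth and channel construction, so
+# no real credentials or network are involved. Assertions branch on
+# HAS_GRPC_GCP because the grpc_gcp code path passes one extra positional
+# argument to secure_channel.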
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+    return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target)
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
+
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
+@mock.patch(
+ "google.auth.transport.requests.Request",
+ autospec=True,
+ return_value=mock.sentinel.Request,
+)
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit_with_default_host(
+ grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin
+):
+ target = "example.com:443"
+ default_host = "example.com"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target, default_host=default_host)
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
+ auth_metadata_plugin.assert_called_once_with(
+ mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
+ )
+
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+    return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit_with_ssl_creds(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+
+ ssl_creds = grpc.ssl_channel_credentials()
+
+ grpc_helpers.create_channel(target, ssl_credentials=ssl_creds)
+
+ default.assert_called_once_with(scopes=None, default_scopes=None)
+
+ composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
+ composite_creds = composite_creds_call.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+    return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit_with_scopes(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target, scopes=["one", "two"])
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
+
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch(
+ "google.auth.default",
+ autospec=True,
+    return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit_with_default_scopes(
+ grpc_secure_channel, default, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target, default_scopes=["three", "four"])
+
+ assert channel is grpc_secure_channel.return_value
+
+ default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
+
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+def test_create_channel_explicit_with_duplicate_credentials():
+ target = "example.com:443"
+
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ grpc_helpers.create_channel(
+ target,
+ credentials_file="credentials.json",
+ credentials=mock.sentinel.credentials,
+ )
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target, credentials=mock.sentinel.credentials)
+
+ auth_creds.assert_called_once_with(
+ mock.sentinel.credentials, scopes=None, default_scopes=None
+ )
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
+ target = "example.com:443"
+ scopes = ["1", "2"]
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ channel = grpc_helpers.create_channel(
+ target, credentials=credentials, scopes=scopes
+ )
+
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit_default_scopes(
+ grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ channel = grpc_helpers.create_channel(
+ target, credentials=credentials, default_scopes=default_scopes
+ )
+
+ credentials.with_scopes.assert_called_once_with(
+ scopes=None, default_scopes=default_scopes
+ )
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit_with_quota_project(
+ grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ composite_creds = composite_creds_call.return_value
+
+ credentials = mock.create_autospec(
+ google.auth.credentials.CredentialsWithQuotaProject, instance=True
+ )
+
+ channel = grpc_helpers.create_channel(
+ target, credentials=credentials, quota_project_id="project-foo"
+ )
+
+ credentials.with_quota_project.assert_called_once_with("project-foo")
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(target, credentials_file=credentials_file)
+
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=None
+ )
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file_and_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ scopes = ["1", "2"]
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(
+ target, credentials_file=credentials_file, scopes=scopes
+ )
+
+ google.auth.load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=scopes, default_scopes=None
+ )
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+@mock.patch(
+ "google.auth.load_credentials_from_file",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
+def test_create_channel_with_credentials_file_and_default_scopes(
+ load_credentials_from_file, grpc_secure_channel, composite_creds_call
+):
+ target = "example.com:443"
+ default_scopes = ["3", "4"]
+
+ credentials_file = "/path/to/credentials/file.json"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(
+ target, credentials_file=credentials_file, default_scopes=default_scopes
+ )
+
+ load_credentials_from_file.assert_called_once_with(
+ credentials_file, scopes=None, default_scopes=default_scopes
+ )
+
+ assert channel is grpc_secure_channel.return_value
+ if grpc_helpers.HAS_GRPC_GCP:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ else:
+ grpc_secure_channel.assert_called_once_with(target, composite_creds)
+
+
+@pytest.mark.skipif(
+ not grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available"
+)
+@mock.patch("grpc_gcp.secure_channel")
+def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel):
+ target = "example.com:443"
+ scopes = ["test_scope"]
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
+ grpc_gcp_secure_channel.assert_called()
+
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
+
+
+@pytest.mark.skipif(grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module is available")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_without_grpc_gcp(grpc_secure_channel):
+ target = "example.com:443"
+ scopes = ["test_scope"]
+
+ credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
+ credentials.requires_scopes = True
+
+ grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
+ grpc_secure_channel.assert_called()
+
+ credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
+
+
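+# ChannelStub is a fake channel that records every request and plays back
+# canned responses, letting generated stubs be exercised without a server.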
+class TestChannelStub(object):
+ def test_single_response(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ expected_request = operations_pb2.GetOperationRequest(name="meep")
+ expected_response = operations_pb2.Operation(name="moop")
+
+ channel.GetOperation.response = expected_response
+
+ response = stub.GetOperation(expected_request)
+
+ assert response == expected_response
+ assert channel.requests == [("GetOperation", expected_request)]
+ assert channel.GetOperation.requests == [expected_request]
+
+ def test_no_response(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ expected_request = operations_pb2.GetOperationRequest(name="meep")
+
+ with pytest.raises(ValueError) as exc_info:
+ stub.GetOperation(expected_request)
+
+ assert exc_info.match("GetOperation")
+
+ def test_missing_method(self):
+ channel = grpc_helpers.ChannelStub()
+
+ with pytest.raises(AttributeError):
+ channel.DoesNotExist.response
+
+ def test_exception_response(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ expected_request = operations_pb2.GetOperationRequest(name="meep")
+
+ channel.GetOperation.response = RuntimeError()
+
+ with pytest.raises(RuntimeError):
+ stub.GetOperation(expected_request)
+
+ def test_callable_response(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ expected_request = operations_pb2.GetOperationRequest(name="meep")
+ expected_response = operations_pb2.Operation(name="moop")
+
+ on_get_operation = mock.Mock(spec=("__call__",), return_value=expected_response)
+
+ channel.GetOperation.response = on_get_operation
+
+ response = stub.GetOperation(expected_request)
+
+ assert response == expected_response
+ on_get_operation.assert_called_once_with(expected_request)
+
+ def test_multiple_responses(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ expected_request = operations_pb2.GetOperationRequest(name="meep")
+ expected_responses = [
+ operations_pb2.Operation(name="foo"),
+ operations_pb2.Operation(name="bar"),
+ operations_pb2.Operation(name="baz"),
+ ]
+
+ channel.GetOperation.responses = iter(expected_responses)
+
+ response1 = stub.GetOperation(expected_request)
+ response2 = stub.GetOperation(expected_request)
+ response3 = stub.GetOperation(expected_request)
+
+ assert response1 == expected_responses[0]
+ assert response2 == expected_responses[1]
+ assert response3 == expected_responses[2]
+ assert channel.requests == [("GetOperation", expected_request)] * 3
+ assert channel.GetOperation.requests == [expected_request] * 3
+
+ with pytest.raises(StopIteration):
+ stub.GetOperation(expected_request)
+
+ def test_multiple_responses_and_single_response_error(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ channel.GetOperation.responses = []
+ channel.GetOperation.response = mock.sentinel.response
+
+ with pytest.raises(ValueError):
+ stub.GetOperation(operations_pb2.GetOperationRequest())
+
+ def test_call_info(self):
+ channel = grpc_helpers.ChannelStub()
+ stub = operations_pb2.OperationsStub(channel)
+ expected_request = operations_pb2.GetOperationRequest(name="meep")
+ expected_response = operations_pb2.Operation(name="moop")
+ expected_metadata = [("red", "blue"), ("two", "shoe")]
+ expected_credentials = mock.sentinel.credentials
+ channel.GetOperation.response = expected_response
+
+ response = stub.GetOperation(
+ expected_request,
+ timeout=42,
+ metadata=expected_metadata,
+ credentials=expected_credentials,
+ )
+
+ assert response == expected_response
+ assert channel.requests == [("GetOperation", expected_request)]
+ assert channel.GetOperation.calls == [
+ (expected_request, 42, expected_metadata, expected_credentials)
+ ]
+
+ def test_unary_unary(self):
+ channel = grpc_helpers.ChannelStub()
+ method_name = "GetOperation"
+ callable_stub = channel.unary_unary(method_name)
+ assert callable_stub._method == method_name
+ assert callable_stub._channel == channel
+
+ def test_unary_stream(self):
+ channel = grpc_helpers.ChannelStub()
+ method_name = "GetOperation"
+ callable_stub = channel.unary_stream(method_name)
+ assert callable_stub._method == method_name
+ assert callable_stub._channel == channel
+
+ def test_stream_unary(self):
+ channel = grpc_helpers.ChannelStub()
+ method_name = "GetOperation"
+ callable_stub = channel.stream_unary(method_name)
+ assert callable_stub._method == method_name
+ assert callable_stub._channel == channel
+
+ def test_stream_stream(self):
+ channel = grpc_helpers.ChannelStub()
+ method_name = "GetOperation"
+ callable_stub = channel.stream_stream(method_name)
+ assert callable_stub._method == method_name
+ assert callable_stub._channel == channel
+
+ def test_subscribe_unsubscribe(self):
+ channel = grpc_helpers.ChannelStub()
+ assert channel.subscribe(None) is None
+ assert channel.unsubscribe(None) is None
+
+ def test_close(self):
+ channel = grpc_helpers.ChannelStub()
+ assert channel.close() is None
diff --git a/tests/unit/test_iam.py b/tests/unit/test_iam.py
new file mode 100644
index 0000000..fbd242e
--- /dev/null
+++ b/tests/unit/test_iam.py
@@ -0,0 +1,382 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core.iam import _DICT_ACCESS_MSG, InvalidOperationException
+
+
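+# Policy behaves like a dict of role -> member set for version 1 policies;
+# version 3 policies and bindings with conditions reject dict-style access
+# with InvalidOperationException.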
+class TestPolicy:
+ @staticmethod
+ def _get_target_class():
+ from google.api_core.iam import Policy
+
+ return Policy
+
+ def _make_one(self, *args, **kw):
+ return self._get_target_class()(*args, **kw)
+
+ def test_ctor_defaults(self):
+ empty = frozenset()
+ policy = self._make_one()
+ assert policy.etag is None
+ assert policy.version is None
+ assert policy.owners == empty
+ assert policy.editors == empty
+ assert policy.viewers == empty
+ assert len(policy) == 0
+ assert dict(policy) == {}
+
+ def test_ctor_explicit(self):
+ VERSION = 1
+ ETAG = "ETAG"
+ empty = frozenset()
+ policy = self._make_one(ETAG, VERSION)
+ assert policy.etag == ETAG
+ assert policy.version == VERSION
+ assert policy.owners == empty
+ assert policy.editors == empty
+ assert policy.viewers == empty
+ assert len(policy) == 0
+ assert dict(policy) == {}
+
+ def test___getitem___miss(self):
+ policy = self._make_one()
+ assert policy["nonesuch"] == set()
+
+    def test___getitem___and_set(self):
+ from google.api_core.iam import OWNER_ROLE
+
+ policy = self._make_one()
+
+        # Get the member set via the getter, then modify it in place.
+ policy[OWNER_ROLE].add("user:phred@example.com")
+ assert dict(policy) == {OWNER_ROLE: {"user:phred@example.com"}}
+
+ def test___getitem___version3(self):
+ policy = self._make_one("DEADBEEF", 3)
+ with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
+ policy["role"]
+
+ def test___getitem___with_conditions(self):
+ USER = "user:phred@example.com"
+ CONDITION = {"expression": "2 > 1"}
+ policy = self._make_one("DEADBEEF", 1)
+ policy.bindings = [
+ {"role": "role/reader", "members": [USER], "condition": CONDITION}
+ ]
+ with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
+ policy["role/reader"]
+
+ def test___setitem__(self):
+ USER = "user:phred@example.com"
+ PRINCIPALS = set([USER])
+ policy = self._make_one()
+ policy["rolename"] = [USER]
+ assert policy["rolename"] == PRINCIPALS
+ assert len(policy) == 1
+ assert dict(policy) == {"rolename": PRINCIPALS}
+
+    def test___setitem___overwrite(self):
+ GROUP = "group:test@group.com"
+ USER = "user:phred@example.com"
+ ALL_USERS = "allUsers"
+ MEMBERS = set([ALL_USERS])
+ GROUPS = set([GROUP])
+ policy = self._make_one()
+ policy["first"] = [GROUP]
+ policy["second"] = [USER]
+ policy["second"] = [ALL_USERS]
+ assert policy["second"] == MEMBERS
+ assert len(policy) == 2
+ assert dict(policy) == {"first": GROUPS, "second": MEMBERS}
+
+ def test___setitem___version3(self):
+ policy = self._make_one("DEADBEEF", 3)
+ with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
+ policy["role/reader"] = ["user:phred@example.com"]
+
+ def test___setitem___with_conditions(self):
+ USER = "user:phred@example.com"
+ CONDITION = {"expression": "2 > 1"}
+ policy = self._make_one("DEADBEEF", 1)
+ policy.bindings = [
+ {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
+ ]
+ with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
+ policy["role/reader"] = ["user:phred@example.com"]
+
+ def test___delitem___hit(self):
+ policy = self._make_one()
+ policy.bindings = [
+ {"role": "to/keep", "members": set(["phred@example.com"])},
+ {"role": "to/remove", "members": set(["phred@example.com"])},
+ ]
+ del policy["to/remove"]
+ assert len(policy) == 1
+ assert dict(policy) == {"to/keep": set(["phred@example.com"])}
+
+ def test___delitem___miss(self):
+ policy = self._make_one()
+ with pytest.raises(KeyError):
+ del policy["nonesuch"]
+
+ def test___delitem___version3(self):
+ policy = self._make_one("DEADBEEF", 3)
+ with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
+ del policy["role/reader"]
+
+ def test___delitem___with_conditions(self):
+ USER = "user:phred@example.com"
+ CONDITION = {"expression": "2 > 1"}
+ policy = self._make_one("DEADBEEF", 1)
+ policy.bindings = [
+ {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
+ ]
+ with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG):
+ del policy["role/reader"]
+
+ def test_bindings_property(self):
+ USER = "user:phred@example.com"
+ CONDITION = {"expression": "2 > 1"}
+ policy = self._make_one()
+ BINDINGS = [
+ {"role": "role/reader", "members": set([USER]), "condition": CONDITION}
+ ]
+ policy.bindings = BINDINGS
+ assert policy.bindings == BINDINGS
+
+ def test_owners_getter(self):
+ from google.api_core.iam import OWNER_ROLE
+
+ MEMBER = "user:phred@example.com"
+ expected = frozenset([MEMBER])
+ policy = self._make_one()
+ policy[OWNER_ROLE] = [MEMBER]
+ assert policy.owners == expected
+
+ def test_owners_setter(self):
+ import warnings
+ from google.api_core.iam import OWNER_ROLE
+
+ MEMBER = "user:phred@example.com"
+ expected = set([MEMBER])
+ policy = self._make_one()
+
+ with warnings.catch_warnings(record=True) as warned:
+ policy.owners = [MEMBER]
+
+ (warning,) = warned
+ assert warning.category is DeprecationWarning
+ assert policy[OWNER_ROLE] == expected
+
+ def test_editors_getter(self):
+ from google.api_core.iam import EDITOR_ROLE
+
+ MEMBER = "user:phred@example.com"
+ expected = frozenset([MEMBER])
+ policy = self._make_one()
+ policy[EDITOR_ROLE] = [MEMBER]
+ assert policy.editors == expected
+
+ def test_editors_setter(self):
+ import warnings
+ from google.api_core.iam import EDITOR_ROLE
+
+ MEMBER = "user:phred@example.com"
+ expected = set([MEMBER])
+ policy = self._make_one()
+
+ with warnings.catch_warnings(record=True) as warned:
+ policy.editors = [MEMBER]
+
+ (warning,) = warned
+ assert warning.category is DeprecationWarning
+ assert policy[EDITOR_ROLE] == expected
+
+ def test_viewers_getter(self):
+ from google.api_core.iam import VIEWER_ROLE
+
+ MEMBER = "user:phred@example.com"
+ expected = frozenset([MEMBER])
+ policy = self._make_one()
+ policy[VIEWER_ROLE] = [MEMBER]
+ assert policy.viewers == expected
+
+ def test_viewers_setter(self):
+ import warnings
+ from google.api_core.iam import VIEWER_ROLE
+
+ MEMBER = "user:phred@example.com"
+ expected = set([MEMBER])
+ policy = self._make_one()
+
+ with warnings.catch_warnings(record=True) as warned:
+ policy.viewers = [MEMBER]
+
+ (warning,) = warned
+ assert warning.category is DeprecationWarning
+ assert policy[VIEWER_ROLE] == expected
+
+ def test_user(self):
+ EMAIL = "phred@example.com"
+ MEMBER = "user:%s" % (EMAIL,)
+ policy = self._make_one()
+ assert policy.user(EMAIL) == MEMBER
+
+ def test_service_account(self):
+ EMAIL = "phred@example.com"
+ MEMBER = "serviceAccount:%s" % (EMAIL,)
+ policy = self._make_one()
+ assert policy.service_account(EMAIL) == MEMBER
+
+ def test_group(self):
+ EMAIL = "phred@example.com"
+ MEMBER = "group:%s" % (EMAIL,)
+ policy = self._make_one()
+ assert policy.group(EMAIL) == MEMBER
+
+ def test_domain(self):
+ DOMAIN = "example.com"
+ MEMBER = "domain:%s" % (DOMAIN,)
+ policy = self._make_one()
+ assert policy.domain(DOMAIN) == MEMBER
+
+ def test_all_users(self):
+ policy = self._make_one()
+ assert policy.all_users() == "allUsers"
+
+ def test_authenticated_users(self):
+ policy = self._make_one()
+ assert policy.authenticated_users() == "allAuthenticatedUsers"
+
+ def test_from_api_repr_only_etag(self):
+ empty = frozenset()
+ RESOURCE = {"etag": "ACAB"}
+ klass = self._get_target_class()
+ policy = klass.from_api_repr(RESOURCE)
+ assert policy.etag == "ACAB"
+ assert policy.version is None
+ assert policy.owners == empty
+ assert policy.editors == empty
+ assert policy.viewers == empty
+ assert dict(policy) == {}
+
+ def test_from_api_repr_complete(self):
+ from google.api_core.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
+
+ OWNER1 = "group:cloud-logs@google.com"
+ OWNER2 = "user:phred@example.com"
+ EDITOR1 = "domain:google.com"
+ EDITOR2 = "user:phred@example.com"
+ VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
+ VIEWER2 = "user:phred@example.com"
+ RESOURCE = {
+ "etag": "DEADBEEF",
+ "version": 1,
+ "bindings": [
+ {"role": OWNER_ROLE, "members": [OWNER1, OWNER2]},
+ {"role": EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
+ {"role": VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
+ ],
+ }
+ klass = self._get_target_class()
+ policy = klass.from_api_repr(RESOURCE)
+ assert policy.etag == "DEADBEEF"
+ assert policy.version == 1
+        assert policy.owners == frozenset([OWNER1, OWNER2])
+        assert policy.editors == frozenset([EDITOR1, EDITOR2])
+        assert policy.viewers == frozenset([VIEWER1, VIEWER2])
+ assert dict(policy) == {
+ OWNER_ROLE: set([OWNER1, OWNER2]),
+ EDITOR_ROLE: set([EDITOR1, EDITOR2]),
+ VIEWER_ROLE: set([VIEWER1, VIEWER2]),
+ }
+ assert policy.bindings == [
+ {"role": OWNER_ROLE, "members": set([OWNER1, OWNER2])},
+ {"role": EDITOR_ROLE, "members": set([EDITOR1, EDITOR2])},
+ {"role": VIEWER_ROLE, "members": set([VIEWER1, VIEWER2])},
+ ]
+
+ def test_from_api_repr_unknown_role(self):
+ USER = "user:phred@example.com"
+ GROUP = "group:cloud-logs@google.com"
+ RESOURCE = {
+ "etag": "DEADBEEF",
+ "version": 1,
+ "bindings": [{"role": "unknown", "members": [USER, GROUP]}],
+ }
+ klass = self._get_target_class()
+ policy = klass.from_api_repr(RESOURCE)
+ assert policy.etag == "DEADBEEF"
+ assert policy.version == 1
+        assert dict(policy) == {"unknown": set([USER, GROUP])}
+
+ def test_to_api_repr_defaults(self):
+ policy = self._make_one()
+ assert policy.to_api_repr() == {}
+
+ def test_to_api_repr_only_etag(self):
+ policy = self._make_one("DEADBEEF")
+ assert policy.to_api_repr() == {"etag": "DEADBEEF"}
+
+ def test_to_api_repr_binding_wo_members(self):
+ policy = self._make_one()
+ policy["empty"] = []
+ assert policy.to_api_repr() == {}
+
+ def test_to_api_repr_binding_w_duplicates(self):
+ import warnings
+ from google.api_core.iam import OWNER_ROLE
+
+ OWNER = "group:cloud-logs@google.com"
+ policy = self._make_one()
+ with warnings.catch_warnings(record=True):
+ policy.owners = [OWNER, OWNER]
+ assert policy.to_api_repr() == {
+ "bindings": [{"role": OWNER_ROLE, "members": [OWNER]}]
+ }
+
+ def test_to_api_repr_full(self):
+ import operator
+ from google.api_core.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
+
+ OWNER1 = "group:cloud-logs@google.com"
+ OWNER2 = "user:phred@example.com"
+ EDITOR1 = "domain:google.com"
+ EDITOR2 = "user:phred@example.com"
+ VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
+ VIEWER2 = "user:phred@example.com"
+ CONDITION = {
+ "title": "title",
+ "description": "description",
+ "expression": "true",
+ }
+ BINDINGS = [
+ {"role": OWNER_ROLE, "members": [OWNER1, OWNER2]},
+ {"role": EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
+ {"role": VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
+ {
+ "role": VIEWER_ROLE,
+ "members": [VIEWER1, VIEWER2],
+ "condition": CONDITION,
+ },
+ ]
+ policy = self._make_one("DEADBEEF", 1)
+ policy.bindings = BINDINGS
+ resource = policy.to_api_repr()
+ assert resource["etag"] == "DEADBEEF"
+ assert resource["version"] == 1
+ key = operator.itemgetter("role")
+ assert sorted(resource["bindings"], key=key) == sorted(BINDINGS, key=key)
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
new file mode 100644
index 0000000..22e23bc
--- /dev/null
+++ b/tests/unit/test_operation.py
@@ -0,0 +1,326 @@
+# Copyright 2017, Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import exceptions
+from google.api_core import operation
+from google.api_core import operations_v1
+from google.api_core import retry
+from google.longrunning import operations_pb2
+from google.protobuf import struct_pb2
+from google.rpc import code_pb2
+from google.rpc import status_pb2
+
+TEST_OPERATION_NAME = "test/operation"
+
+
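+# Helper: builds an Operation proto, packing any metadata/response message
+# into the corresponding google.protobuf.Any fields.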
+def make_operation_proto(
+ name=TEST_OPERATION_NAME, metadata=None, response=None, error=None, **kwargs
+):
+ operation_proto = operations_pb2.Operation(name=name, **kwargs)
+
+ if metadata is not None:
+ operation_proto.metadata.Pack(metadata)
+
+ if response is not None:
+ operation_proto.response.Pack(response)
+
+ if error is not None:
+ operation_proto.error.CopyFrom(error)
+
+ return operation_proto
+
+
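+# Helper: wraps a sequence of Operation protos in an operation.Operation
+# future whose mocked refresh() returns them one at a time.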
+def make_operation_future(client_operations_responses=None):
+ if client_operations_responses is None:
+ client_operations_responses = [make_operation_proto()]
+
+ refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
+ refresh.responses = client_operations_responses
+ cancel = mock.Mock(spec=["__call__"])
+ operation_future = operation.Operation(
+ client_operations_responses[0],
+ refresh,
+ cancel,
+ result_type=struct_pb2.Struct,
+ metadata_type=struct_pb2.Struct,
+ )
+
+ return operation_future, refresh, cancel
+
+
+def test_constructor():
+ future, refresh, _ = make_operation_future()
+
+ assert future.operation == refresh.responses[0]
+ assert future.operation.done is False
+ assert future.operation.name == TEST_OPERATION_NAME
+ assert future.metadata is None
+ assert future.running()
+
+
+def test_metadata():
+ expected_metadata = struct_pb2.Struct()
+ future, _, _ = make_operation_future(
+ [make_operation_proto(metadata=expected_metadata)]
+ )
+
+ assert future.metadata == expected_metadata
+
+
+def test_cancellation():
+ responses = [
+ make_operation_proto(),
+ # Second response indicates that the operation was cancelled.
+ make_operation_proto(
+ done=True, error=status_pb2.Status(code=code_pb2.CANCELLED)
+ ),
+ ]
+ future, _, cancel = make_operation_future(responses)
+
+ assert future.cancel()
+ assert future.cancelled()
+ cancel.assert_called_once_with()
+
+ # Cancelling twice should have no effect.
+ assert not future.cancel()
+ cancel.assert_called_once_with()
+
+
+def test_result():
+ expected_result = struct_pb2.Struct()
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the result.
+ make_operation_proto(done=True, response=expected_result),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ result = future.result()
+
+ assert result == expected_result
+ assert future.done()
+
+
+def test_done_w_retry():
+ RETRY_PREDICATE = retry.if_exception_type(exceptions.TooManyRequests)
+ test_retry = retry.Retry(predicate=RETRY_PREDICATE)
+
+ expected_result = struct_pb2.Struct()
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the result.
+ make_operation_proto(done=True, response=expected_result),
+ ]
+ future, _, _ = make_operation_future(responses)
+ future._refresh = mock.Mock()
+
+ future.done(retry=test_retry)
+ future._refresh.assert_called_once_with(retry=test_retry)
+
+
+def test_exception():
+ expected_exception = status_pb2.Status(message="meep")
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the error.
+ make_operation_proto(done=True, error=expected_exception),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ exception = future.exception()
+
+ assert expected_exception.message in "{!r}".format(exception)
+
+
+def test_exception_with_error_code():
+ expected_exception = status_pb2.Status(message="meep", code=5)
+ responses = [
+ make_operation_proto(),
+ # Second operation response includes the error.
+ make_operation_proto(done=True, error=expected_exception),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ exception = future.exception()
+
+ assert expected_exception.message in "{!r}".format(exception)
+ # Status Code 5 maps to Not Found
+ # https://developers.google.com/maps-booking/reference/grpc-api/status_codes
+ assert isinstance(exception, exceptions.NotFound)
+
+
+def test_unexpected_result():
+ responses = [
+ make_operation_proto(),
+        # Second operation response is done, but has neither an error
+        # nor a response.
+ make_operation_proto(done=True),
+ ]
+ future, _, _ = make_operation_future(responses)
+
+ exception = future.exception()
+
+ assert "Unexpected state" in "{!r}".format(exception)
+
+
+def test__refresh_http():
+ json_response = {"name": TEST_OPERATION_NAME, "done": True}
+ api_request = mock.Mock(return_value=json_response)
+
+ result = operation._refresh_http(api_request, TEST_OPERATION_NAME)
+
+ assert isinstance(result, operations_pb2.Operation)
+ assert result.name == TEST_OPERATION_NAME
+ assert result.done is True
+
+ api_request.assert_called_once_with(
+ method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
+ )
+
+
+def test__refresh_http_w_retry():
+ json_response = {"name": TEST_OPERATION_NAME, "done": True}
+ api_request = mock.Mock()
+ retry = mock.Mock()
+ retry.return_value.return_value = json_response
+
+ result = operation._refresh_http(api_request, TEST_OPERATION_NAME, retry=retry)
+
+ assert isinstance(result, operations_pb2.Operation)
+ assert result.name == TEST_OPERATION_NAME
+ assert result.done is True
+
+ api_request.assert_not_called()
+ retry.assert_called_once_with(api_request)
+ retry.return_value.assert_called_once_with(
+ method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
+ )
+
+
+def test__cancel_http():
+ api_request = mock.Mock()
+
+ operation._cancel_http(api_request, TEST_OPERATION_NAME)
+
+ api_request.assert_called_once_with(
+ method="POST", path="operations/{}:cancel".format(TEST_OPERATION_NAME)
+ )
+
+
+def test_from_http_json():
+ operation_json = {"name": TEST_OPERATION_NAME, "done": True}
+ api_request = mock.sentinel.api_request
+
+ future = operation.from_http_json(
+ operation_json, api_request, struct_pb2.Struct, metadata_type=struct_pb2.Struct
+ )
+
+ assert future._result_type == struct_pb2.Struct
+ assert future._metadata_type == struct_pb2.Struct
+ assert future.operation.name == TEST_OPERATION_NAME
+ assert future.done
+
+
+def test__refresh_grpc():
+ operations_stub = mock.Mock(spec=["GetOperation"])
+ expected_result = make_operation_proto(done=True)
+ operations_stub.GetOperation.return_value = expected_result
+
+ result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME)
+
+ assert result == expected_result
+ expected_request = operations_pb2.GetOperationRequest(name=TEST_OPERATION_NAME)
+ operations_stub.GetOperation.assert_called_once_with(expected_request)
+
+
+def test__refresh_grpc_w_retry():
+ operations_stub = mock.Mock(spec=["GetOperation"])
+ expected_result = make_operation_proto(done=True)
+ retry = mock.Mock()
+ retry.return_value.return_value = expected_result
+
+ result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME, retry=retry)
+
+ assert result == expected_result
+ expected_request = operations_pb2.GetOperationRequest(name=TEST_OPERATION_NAME)
+ operations_stub.GetOperation.assert_not_called()
+ retry.assert_called_once_with(operations_stub.GetOperation)
+ retry.return_value.assert_called_once_with(expected_request)
+
+
+def test__cancel_grpc():
+ operations_stub = mock.Mock(spec=["CancelOperation"])
+
+ operation._cancel_grpc(operations_stub, TEST_OPERATION_NAME)
+
+ expected_request = operations_pb2.CancelOperationRequest(name=TEST_OPERATION_NAME)
+ operations_stub.CancelOperation.assert_called_once_with(expected_request)
+
+
+def test_from_grpc():
+ operation_proto = make_operation_proto(done=True)
+ operations_stub = mock.sentinel.operations_stub
+
+ future = operation.from_grpc(
+ operation_proto,
+ operations_stub,
+ struct_pb2.Struct,
+ metadata_type=struct_pb2.Struct,
+ grpc_metadata=[("x-goog-request-params", "foo")],
+ )
+
+ assert future._result_type == struct_pb2.Struct
+ assert future._metadata_type == struct_pb2.Struct
+ assert future.operation.name == TEST_OPERATION_NAME
+ assert future.done
+ assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
+ assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
+
+
+def test_from_gapic():
+ operation_proto = make_operation_proto(done=True)
+ operations_client = mock.create_autospec(
+ operations_v1.OperationsClient, instance=True
+ )
+
+ future = operation.from_gapic(
+ operation_proto,
+ operations_client,
+ struct_pb2.Struct,
+ metadata_type=struct_pb2.Struct,
+ grpc_metadata=[("x-goog-request-params", "foo")],
+ )
+
+ assert future._result_type == struct_pb2.Struct
+ assert future._metadata_type == struct_pb2.Struct
+ assert future.operation.name == TEST_OPERATION_NAME
+ assert future.done
+ assert future._refresh.keywords["metadata"] == [("x-goog-request-params", "foo")]
+ assert future._cancel.keywords["metadata"] == [("x-goog-request-params", "foo")]
+
+
+def test_deserialize():
+ op = make_operation_proto(name="foobarbaz")
+ serialized = op.SerializeToString()
+ deserialized_op = operation.Operation.deserialize(serialized)
+ assert op.name == deserialized_op.name
+ assert type(op) is type(deserialized_op)
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
new file mode 100644
index 0000000..a44e998
--- /dev/null
+++ b/tests/unit/test_page_iterator.py
@@ -0,0 +1,665 @@
+# Copyright 2015 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+import types
+
+import mock
+import pytest
+
+from google.api_core import page_iterator
+
+
+def test__do_nothing_page_start():
+ assert page_iterator._do_nothing_page_start(None, None, None) is None
+
+
+class TestPage(object):
+ def test_constructor(self):
+ parent = mock.sentinel.parent
+ item_to_value = mock.sentinel.item_to_value
+
+ page = page_iterator.Page(parent, (1, 2, 3), item_to_value)
+
+ assert page.num_items == 3
+ assert page.remaining == 3
+ assert page._parent is parent
+ assert page._item_to_value is item_to_value
+ assert page.raw_page is None
+
+ def test___iter__(self):
+ page = page_iterator.Page(None, (), None, None)
+ assert iter(page) is page
+
+ def test_iterator_calls_parent_item_to_value(self):
+ parent = mock.sentinel.parent
+
+ item_to_value = mock.Mock(
+ side_effect=lambda iterator, value: value, spec=["__call__"]
+ )
+
+ page = page_iterator.Page(parent, (10, 11, 12), item_to_value)
+ page._remaining = 100
+
+ assert item_to_value.call_count == 0
+ assert page.remaining == 100
+
+ assert next(page) == 10
+ assert item_to_value.call_count == 1
+ item_to_value.assert_called_with(parent, 10)
+ assert page.remaining == 99
+
+ assert next(page) == 11
+ assert item_to_value.call_count == 2
+ item_to_value.assert_called_with(parent, 11)
+ assert page.remaining == 98
+
+ assert next(page) == 12
+ assert item_to_value.call_count == 3
+ item_to_value.assert_called_with(parent, 12)
+ assert page.remaining == 97
+
+ def test_raw_page(self):
+ parent = mock.sentinel.parent
+ item_to_value = mock.sentinel.item_to_value
+
+ raw_page = mock.sentinel.raw_page
+
+ page = page_iterator.Page(parent, (1, 2, 3), item_to_value, raw_page=raw_page)
+ assert page.raw_page is raw_page
+
+ with pytest.raises(AttributeError):
+ page.raw_page = None
+
+
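+# Concrete subclass supplying a trivial _next_page so the abstract Iterator
+# machinery can be exercised directly.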
+class PageIteratorImpl(page_iterator.Iterator):
+ def _next_page(self):
+ return mock.create_autospec(page_iterator.Page, instance=True)
+
+
+class TestIterator(object):
+ def test_constructor(self):
+ client = mock.sentinel.client
+ item_to_value = mock.sentinel.item_to_value
+ token = "ab13nceor03"
+ max_results = 1337
+
+ iterator = PageIteratorImpl(
+ client, item_to_value, page_token=token, max_results=max_results
+ )
+
+ assert not iterator._started
+ assert iterator.client is client
+ assert iterator.item_to_value == item_to_value
+ assert iterator.max_results == max_results
+ # Changing attributes.
+ assert iterator.page_number == 0
+ assert iterator.next_page_token == token
+ assert iterator.num_results == 0
+
+ def test_next(self):
+ iterator = PageIteratorImpl(None, None)
+ page_1 = page_iterator.Page(
+ iterator, ("item 1.1", "item 1.2"), page_iterator._item_to_value_identity
+ )
+ page_2 = page_iterator.Page(
+ iterator, ("item 2.1",), page_iterator._item_to_value_identity
+ )
+ iterator._next_page = mock.Mock(side_effect=[page_1, page_2, None])
+
+ result = next(iterator)
+ assert result == "item 1.1"
+ result = next(iterator)
+ assert result == "item 1.2"
+ result = next(iterator)
+ assert result == "item 2.1"
+
+ with pytest.raises(StopIteration):
+ next(iterator)
+
+ def test_pages_property_starts(self):
+ iterator = PageIteratorImpl(None, None)
+
+ assert not iterator._started
+
+ assert isinstance(iterator.pages, types.GeneratorType)
+
+ assert iterator._started
+
+ def test_pages_property_restart(self):
+ iterator = PageIteratorImpl(None, None)
+
+ assert iterator.pages
+
+ # Make sure we cannot restart.
+ with pytest.raises(ValueError):
+ assert iterator.pages
+
+ def test__page_iter_increment(self):
+ iterator = PageIteratorImpl(None, None)
+ page = page_iterator.Page(
+ iterator, ("item",), page_iterator._item_to_value_identity
+ )
+ iterator._next_page = mock.Mock(side_effect=[page, None])
+
+ assert iterator.num_results == 0
+
+ page_iter = iterator._page_iter(increment=True)
+ next(page_iter)
+
+ assert iterator.num_results == 1
+
+ def test__page_iter_no_increment(self):
+ iterator = PageIteratorImpl(None, None)
+
+ assert iterator.num_results == 0
+
+ page_iter = iterator._page_iter(increment=False)
+ next(page_iter)
+
+        # Results should still be 0 after fetching a page.
+ assert iterator.num_results == 0
+
+ def test__items_iter(self):
+ # Items to be returned.
+ item1 = 17
+ item2 = 100
+ item3 = 211
+
+ # Make pages from mock responses
+ parent = mock.sentinel.parent
+ page1 = page_iterator.Page(
+ parent, (item1, item2), page_iterator._item_to_value_identity
+ )
+ page2 = page_iterator.Page(
+ parent, (item3,), page_iterator._item_to_value_identity
+ )
+
+ iterator = PageIteratorImpl(None, None)
+ iterator._next_page = mock.Mock(side_effect=[page1, page2, None])
+
+ items_iter = iterator._items_iter()
+
+ assert isinstance(items_iter, types.GeneratorType)
+
+ # Consume items and check the state of the iterator.
+ assert iterator.num_results == 0
+
+ assert next(items_iter) == item1
+ assert iterator.num_results == 1
+
+ assert next(items_iter) == item2
+ assert iterator.num_results == 2
+
+ assert next(items_iter) == item3
+ assert iterator.num_results == 3
+
+ with pytest.raises(StopIteration):
+ next(items_iter)
+
+ def test___iter__(self):
+ iterator = PageIteratorImpl(None, None)
+ iterator._next_page = mock.Mock(side_effect=[(1, 2), (3,), None])
+
+ assert not iterator._started
+
+ result = list(iterator)
+
+ assert result == [1, 2, 3]
+ assert iterator._started
+
+ def test___iter__restart(self):
+ iterator = PageIteratorImpl(None, None)
+
+ iter(iterator)
+
+ # Make sure we cannot restart.
+ with pytest.raises(ValueError):
+ iter(iterator)
+
+ def test___iter___restart_after_page(self):
+ iterator = PageIteratorImpl(None, None)
+
+ assert iterator.pages
+
+ # Make sure we cannot restart after starting the page iterator
+ with pytest.raises(ValueError):
+ iter(iterator)
+
+
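+# HTTPIterator drives REST-style list endpoints: each page is fetched with
+# api_request, and the next call carries the returned pageToken.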
+class TestHTTPIterator(object):
+ def test_constructor(self):
+ client = mock.sentinel.client
+ path = "/foo"
+ iterator = page_iterator.HTTPIterator(
+ client, mock.sentinel.api_request, path, mock.sentinel.item_to_value
+ )
+
+ assert not iterator._started
+ assert iterator.client is client
+ assert iterator.path == path
+ assert iterator.item_to_value is mock.sentinel.item_to_value
+ assert iterator._items_key == "items"
+ assert iterator.max_results is None
+ assert iterator.extra_params == {}
+ assert iterator._page_start == page_iterator._do_nothing_page_start
+ # Changing attributes.
+ assert iterator.page_number == 0
+ assert iterator.next_page_token is None
+ assert iterator.num_results == 0
+ assert iterator._page_size is None
+
+ def test_constructor_w_extra_param_collision(self):
+ extra_params = {"pageToken": "val"}
+
+ with pytest.raises(ValueError):
+ page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ extra_params=extra_params,
+ )
+
+ def test_iterate(self):
+ path = "/foo"
+ item1 = {"name": "1"}
+ item2 = {"name": "2"}
+ api_request = mock.Mock(return_value={"items": [item1, item2]})
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ api_request,
+ path=path,
+ item_to_value=page_iterator._item_to_value_identity,
+ )
+
+ assert iterator.num_results == 0
+
+ items_iter = iter(iterator)
+
+ val1 = next(items_iter)
+ assert val1 == item1
+ assert iterator.num_results == 1
+
+ val2 = next(items_iter)
+ assert val2 == item2
+ assert iterator.num_results == 2
+
+ with pytest.raises(StopIteration):
+ next(items_iter)
+
+ api_request.assert_called_once_with(method="GET", path=path, query_params={})
+
+ def test__has_next_page_new(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ )
+
+ # The iterator should *always* indicate that it has a next page
+ # when created so that it can fetch the initial page.
+ assert iterator._has_next_page()
+
+ def test__has_next_page_without_token(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ )
+
+ iterator.page_number = 1
+
+ # The iterator should not indicate that it has a new page if the
+ # initial page has been requested and there's no page token.
+ assert not iterator._has_next_page()
+
+ def test__has_next_page_w_number_w_token(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ )
+
+ iterator.page_number = 1
+ iterator.next_page_token = mock.sentinel.token
+
+ # The iterator should indicate that it has a new page if the
+ # initial page has been requested and there is a page token.
+ assert iterator._has_next_page()
+
+ def test__has_next_page_w_max_results_not_done(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ max_results=3,
+ page_token=mock.sentinel.token,
+ )
+
+ iterator.page_number = 1
+
+ # The iterator should indicate that it has a new page if there
+ # is a page token and it has consumed fewer than max_results results.
+ assert iterator.num_results < iterator.max_results
+ assert iterator._has_next_page()
+
+ def test__has_next_page_w_max_results_done(self):
+
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ max_results=3,
+ page_token=mock.sentinel.token,
+ )
+
+ iterator.page_number = 1
+ iterator.num_results = 3
+
+ # The iterator should not indicate that it has a new page once it
+ # has already consumed max_results results.
+ assert iterator.num_results == iterator.max_results
+ assert not iterator._has_next_page()
+
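+    # Editorial sketch (illustrative only, not part of the original change):
+    # a predicate consistent with the _has_next_page tests above; the real
+    # HTTPIterator may differ in details.
+    @staticmethod
+    def _sketch_has_next_page(page_number, next_page_token, num_results, max_results):
+        if page_number == 0:
+            return True  # always fetch the initial page
+        if max_results is not None and num_results >= max_results:
+            return False  # the result budget is exhausted
+        return next_page_token is not None
+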
+ def test__get_query_params_no_token(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ )
+
+ assert iterator._get_query_params() == {}
+
+ def test__get_query_params_w_token(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ )
+ iterator.next_page_token = "token"
+
+ assert iterator._get_query_params() == {"pageToken": iterator.next_page_token}
+
+ def test__get_query_params_w_max_results(self):
+ max_results = 3
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ max_results=max_results,
+ )
+
+ iterator.num_results = 1
+ local_max = max_results - iterator.num_results
+
+ assert iterator._get_query_params() == {"maxResults": local_max}
+
+ def test__get_query_params_extra_params(self):
+ extra_params = {"key": "val"}
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ extra_params=extra_params,
+ )
+
+ assert iterator._get_query_params() == extra_params
+
+ def test__get_next_page_response_with_post(self):
+ path = "/foo"
+ page_response = {"items": ["one", "two"]}
+ api_request = mock.Mock(return_value=page_response)
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ api_request,
+ path=path,
+ item_to_value=page_iterator._item_to_value_identity,
+ )
+ iterator._HTTP_METHOD = "POST"
+
+ response = iterator._get_next_page_response()
+
+ assert response == page_response
+
+ api_request.assert_called_once_with(method="POST", path=path, data={})
+
+ def test__get_next_page_bad_http_method(self):
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ mock.sentinel.api_request,
+ mock.sentinel.path,
+ mock.sentinel.item_to_value,
+ )
+ iterator._HTTP_METHOD = "NOT-A-VERB"
+
+ with pytest.raises(ValueError):
+ iterator._get_next_page_response()
+
+ @pytest.mark.parametrize(
+ "page_size,max_results,pages",
+ [(3, None, False), (3, 8, False), (3, None, True), (3, 8, True)],
+ )
+ def test_page_size_items(self, page_size, max_results, pages):
+ path = "/foo"
+ NITEMS = 10
+
+ n = [0]  # one-element list so the nested function can mutate it (Python 2 has no nonlocal)
+
+ def api_request(*args, **kw):
+ assert not args
+ query_params = dict(
+ maxResults=(
+ page_size
+ if max_results is None
+ else min(page_size, max_results - n[0])
+ )
+ )
+ if n[0]:
+ query_params.update(pageToken="test")
+ assert kw == {"method": "GET", "path": "/foo", "query_params": query_params}
+ n_items = min(kw["query_params"]["maxResults"], NITEMS - n[0])
+ items = [dict(name=str(i + n[0])) for i in range(n_items)]
+ n[0] += n_items
+ result = dict(items=items)
+ if n[0] < NITEMS:
+ result.update(nextPageToken="test")
+ return result
+
+ iterator = page_iterator.HTTPIterator(
+ mock.sentinel.client,
+ api_request,
+ path=path,
+ item_to_value=page_iterator._item_to_value_identity,
+ page_size=page_size,
+ max_results=max_results,
+ )
+
+ assert iterator.num_results == 0
+
+ n_results = max_results if max_results is not None else NITEMS
+ if pages:
+ items_iter = iter(iterator.pages)
+ npages = int(math.ceil(float(n_results) / page_size))
+ for ipage in range(npages):
+ assert list(next(items_iter)) == [
+ dict(name=str(i))
+ for i in range(
+ ipage * page_size, min((ipage + 1) * page_size, n_results),
+ )
+ ]
+ else:
+ items_iter = iter(iterator)
+ for i in range(n_results):
+ assert next(items_iter) == dict(name=str(i))
+ assert iterator.num_results == i + 1
+
+ with pytest.raises(StopIteration):
+ next(items_iter)
+
+
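+# Editorial sketch (illustrative only, not part of the original change): the
+# maxResults/pageToken/nextPageToken handshake that the fake api_request in
+# test_page_size_items encodes. `fetch` is a hypothetical callable with the
+# same contract as api_request; the real HTTPIterator additionally rebudgets
+# maxResults against max_results.
+def _sketch_rest_paging(fetch, path, page_size):
+    token = None
+    while True:
+        query_params = {"maxResults": page_size}
+        if token:
+            query_params["pageToken"] = token
+        response = fetch(method="GET", path=path, query_params=query_params)
+        for item in response.get("items", ()):
+            yield item
+        token = response.get("nextPageToken")
+        if not token:
+            return
+
+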
+class TestGRPCIterator(object):
+ def test_constructor(self):
+ client = mock.sentinel.client
+ items_field = "items"
+ iterator = page_iterator.GRPCIterator(
+ client, mock.sentinel.method, mock.sentinel.request, items_field
+ )
+
+ assert not iterator._started
+ assert iterator.client is client
+ assert iterator.max_results is None
+ assert iterator.item_to_value is page_iterator._item_to_value_identity
+ assert iterator._method == mock.sentinel.method
+ assert iterator._request == mock.sentinel.request
+ assert iterator._items_field == items_field
+ assert (
+ iterator._request_token_field
+ == page_iterator.GRPCIterator._DEFAULT_REQUEST_TOKEN_FIELD
+ )
+ assert (
+ iterator._response_token_field
+ == page_iterator.GRPCIterator._DEFAULT_RESPONSE_TOKEN_FIELD
+ )
+ # Changing attributes.
+ assert iterator.page_number == 0
+ assert iterator.next_page_token is None
+ assert iterator.num_results == 0
+
+ def test_constructor_options(self):
+ client = mock.sentinel.client
+ items_field = "items"
+ request_field = "request"
+ response_field = "response"
+ iterator = page_iterator.GRPCIterator(
+ client,
+ mock.sentinel.method,
+ mock.sentinel.request,
+ items_field,
+ item_to_value=mock.sentinel.item_to_value,
+ request_token_field=request_field,
+ response_token_field=response_field,
+ max_results=42,
+ )
+
+ assert iterator.client is client
+ assert iterator.max_results == 42
+ assert iterator.item_to_value is mock.sentinel.item_to_value
+ assert iterator._method == mock.sentinel.method
+ assert iterator._request == mock.sentinel.request
+ assert iterator._items_field == items_field
+ assert iterator._request_token_field == request_field
+ assert iterator._response_token_field == response_field
+
+ def test_iterate(self):
+ request = mock.Mock(spec=["page_token"], page_token=None)
+ response1 = mock.Mock(items=["a", "b"], next_page_token="1")
+ response2 = mock.Mock(items=["c"], next_page_token="2")
+ response3 = mock.Mock(items=["d"], next_page_token="")
+ method = mock.Mock(side_effect=[response1, response2, response3])
+ iterator = page_iterator.GRPCIterator(
+ mock.sentinel.client, method, request, "items"
+ )
+
+ assert iterator.num_results == 0
+
+ items = list(iterator)
+ assert items == ["a", "b", "c", "d"]
+
+ method.assert_called_with(request)
+ assert method.call_count == 3
+ assert request.page_token == "2"
+
+ def test_iterate_with_max_results(self):
+ request = mock.Mock(spec=["page_token"], page_token=None)
+ response1 = mock.Mock(items=["a", "b"], next_page_token="1")
+ response2 = mock.Mock(items=["c"], next_page_token="2")
+ response3 = mock.Mock(items=["d"], next_page_token="")
+ method = mock.Mock(side_effect=[response1, response2, response3])
+ iterator = page_iterator.GRPCIterator(
+ mock.sentinel.client, method, request, "items", max_results=3
+ )
+
+ assert iterator.num_results == 0
+
+ items = list(iterator)
+
+ assert items == ["a", "b", "c"]
+ assert iterator.num_results == 3
+
+ method.assert_called_with(request)
+ assert method.call_count == 2
+ assert request.page_token == "1"
+
+
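+# Editorial sketch (illustrative only, not part of the original change): the
+# token handshake test_iterate drives -- each response's next_page_token is
+# copied onto the request until it comes back empty. `method` and `request`
+# mirror the mocks above.
+def _sketch_grpc_paging(method, request):
+    while True:
+        response = method(request)
+        for item in response.items:
+            yield item
+        if not response.next_page_token:
+            return
+        request.page_token = response.next_page_token
+
+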
+class GAXPageIterator(object):
+ """Fake object that matches gax.PageIterator"""
+
+ def __init__(self, pages, page_token=None):
+ self._pages = iter(pages)
+ self.page_token = page_token
+
+ def next(self):
+ return next(self._pages)
+
+ __next__ = next
+
+
+class TestGAXIterator(object):
+ def test_constructor(self):
+ client = mock.sentinel.client
+ token = "zzzyy78kl"
+ page_iter = GAXPageIterator((), page_token=token)
+ item_to_value = page_iterator._item_to_value_identity
+ max_results = 1337
+ iterator = page_iterator._GAXIterator(
+ client, page_iter, item_to_value, max_results=max_results
+ )
+
+ assert not iterator._started
+ assert iterator.client is client
+ assert iterator.item_to_value is item_to_value
+ assert iterator.max_results == max_results
+ assert iterator._gax_page_iter is page_iter
+ # Changing attributes.
+ assert iterator.page_number == 0
+ assert iterator.next_page_token == token
+ assert iterator.num_results == 0
+
+ def test__next_page(self):
+ page_items = (29, 31)
+ page_token = "2sde98ds2s0hh"
+ page_iter = GAXPageIterator([page_items], page_token=page_token)
+ iterator = page_iterator._GAXIterator(
+ mock.sentinel.client, page_iter, page_iterator._item_to_value_identity
+ )
+
+ page = iterator._next_page()
+
+ assert iterator.next_page_token == page_token
+ assert isinstance(page, page_iterator.Page)
+ assert list(page) == list(page_items)
+
+ next_page = iterator._next_page()
+
+ assert next_page is None
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
new file mode 100644
index 0000000..2c5216e
--- /dev/null
+++ b/tests/unit/test_path_template.py
@@ -0,0 +1,389 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import unicode_literals
+
+import mock
+import pytest
+
+from google.api_core import path_template
+
+
+@pytest.mark.parametrize(
+ "tmpl, args, kwargs, expected_result",
+ [
+ # Basic positional params
+ ["/v1/*", ["a"], {}, "/v1/a"],
+ ["/v1/**", ["a/b"], {}, "/v1/a/b"],
+ ["/v1/*/*", ["a", "b"], {}, "/v1/a/b"],
+ ["/v1/*/*/**", ["a", "b", "c/d"], {}, "/v1/a/b/c/d"],
+ # Basic named params
+ ["/v1/{name}", [], {"name": "parent"}, "/v1/parent"],
+ ["/v1/{name=**}", [], {"name": "parent/child"}, "/v1/parent/child"],
+ # Named params with a sub-template
+ ["/v1/{name=parent/*}", [], {"name": "parent/child"}, "/v1/parent/child"],
+ [
+ "/v1/{name=parent/**}",
+ [],
+ {"name": "parent/child/object"},
+ "/v1/parent/child/object",
+ ],
+ # Combining positional and named params
+ ["/v1/*/{name}", ["a"], {"name": "parent"}, "/v1/a/parent"],
+ ["/v1/{name}/*", ["a"], {"name": "parent"}, "/v1/parent/a"],
+ [
+ "/v1/{parent}/*/{child}/*",
+ ["a", "b"],
+ {"parent": "thor", "child": "thorson"},
+ "/v1/thor/a/thorson/b",
+ ],
+ ["/v1/{name}/**", ["a/b"], {"name": "parent"}, "/v1/parent/a/b"],
+ # Combining positional and named params with sub-templates.
+ [
+ "/v1/{name=parent/*}/*",
+ ["a"],
+ {"name": "parent/child"},
+ "/v1/parent/child/a",
+ ],
+ [
+ "/v1/*/{name=parent/**}",
+ ["a"],
+ {"name": "parent/child/object"},
+ "/v1/a/parent/child/object",
+ ],
+ ],
+)
+def test_expand_success(tmpl, args, kwargs, expected_result):
+ result = path_template.expand(tmpl, *args, **kwargs)
+ assert result == expected_result
+ assert path_template.validate(tmpl, result)
+
+
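+# Editorial sketch (illustrative only, not part of the original change): the
+# substitution contract asserted above -- each bare wildcard consumes the
+# next positional argument and each {name...} variable consumes kwargs[name]
+# -- raising the same "Positional"/"Named" ValueErrors the failure tests
+# below expect.
+def _sketch_expand(tmpl, *args, **kwargs):
+    import re  # local import keeps the sketch self-contained
+
+    positional = iter(args)
+
+    def _substitute(match):
+        name = match.group("name")
+        if name is not None:
+            if name not in kwargs:
+                raise ValueError("Named argument missing: {}".format(name))
+            return str(kwargs[name])
+        try:
+            return str(next(positional))
+        except StopIteration:
+            raise ValueError("Positional argument missing")
+
+    return re.sub(r"\{(?P<name>\w+)(?:=[^}]*)?\}|\*\*|\*", _substitute, tmpl)
+
+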
+@pytest.mark.parametrize(
+ "tmpl, args, kwargs, exc_match",
+ [
+ # Missing positional arg.
+ ["v1/*", [], {}, "Positional"],
+ # Missing named arg.
+ ["v1/{name}", [], {}, "Named"],
+ ],
+)
+def test_expanded_failure(tmpl, args, kwargs, exc_match):
+ with pytest.raises(ValueError, match=exc_match):
+ path_template.expand(tmpl, *args, **kwargs)
+
+
+@pytest.mark.parametrize(
+ "request_obj, field, expected_result",
+ [
+ [{"field": "stringValue"}, "field", "stringValue"],
+ [{"field": "stringValue"}, "nosuchfield", None],
+ [{"field": "stringValue"}, "field.subfield", None],
+ [{"field": {"subfield": "stringValue"}}, "field", None],
+ [{"field": {"subfield": "stringValue"}}, "field.subfield", "stringValue"],
+ [{"field": {"subfield": [1, 2, 3]}}, "field.subfield", [1, 2, 3]],
+ [{"field": {"subfield": "stringValue"}}, "field", None],
+ [{"field": {"subfield": "stringValue"}}, "field.nosuchfield", None],
+ [
+ {"field": {"subfield": {"subsubfield": "stringValue"}}},
+ "field.subfield.subsubfield",
+ "stringValue",
+ ],
+ ["string", "field", None],
+ ],
+)
+def test_get_field(request_obj, field, expected_result):
+ result = path_template.get_field(request_obj, field)
+ assert result == expected_result
+
+
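+# Editorial sketch (hedged, not the library's actual code): dotted-path
+# lookup consistent with the table above -- traversal stops at non-dict
+# nodes, and a value that is itself a dict (a sub-message) reads as None.
+def _sketch_get_field(request, field):
+    value = request
+    for part in field.split("."):
+        if not isinstance(value, dict):
+            return None
+        value = value.get(part)
+    if isinstance(value, dict):
+        return None
+    return value
+
+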
+@pytest.mark.parametrize(
+ "request_obj, field, expected_result",
+ [
+ [{"field": "stringValue"}, "field", {}],
+ [{"field": "stringValue"}, "nosuchfield", {"field": "stringValue"}],
+ [{"field": "stringValue"}, "field.subfield", {"field": "stringValue"}],
+ [{"field": {"subfield": "stringValue"}}, "field.subfield", {"field": {}}],
+ [
+ {"field": {"subfield": "stringValue", "q": "w"}, "e": "f"},
+ "field.subfield",
+ {"field": {"q": "w"}, "e": "f"},
+ ],
+ [
+ {"field": {"subfield": "stringValue"}},
+ "field.nosuchfield",
+ {"field": {"subfield": "stringValue"}},
+ ],
+ [
+ {"field": {"subfield": {"subsubfield": "stringValue", "q": "w"}}},
+ "field.subfield.subsubfield",
+ {"field": {"subfield": {"q": "w"}}},
+ ],
+ ["string", "field", "string"],
+ ["string", "field.subfield", "string"],
+ ],
+)
+def test_delete_field(request_obj, field, expected_result):
+ path_template.delete_field(request_obj, field)
+ assert request_obj == expected_result
+
+
+@pytest.mark.parametrize(
+ "tmpl, path",
+ [
+ # Single segment template, but multi-segment value
+ ["v1/*", "v1/a/b"],
+ ["v1/*/*", "v1/a/b/c"],
+ # Single segment named template, but multi-segment value
+ ["v1/{name}", "v1/a/b"],
+ ["v1/{name}/{value}", "v1/a/b/c"],
+ # Named value with a sub-template but invalid value
+ ["v1/{name=parent/*}", "v1/grandparent/child"],
+ ],
+)
+def test_validate_failure(tmpl, path):
+ assert not path_template.validate(tmpl, path)
+
+
+def test__expand_variable_match_unexpected():
+ match = mock.Mock(spec=["group"])
+ match.group.return_value = None
+ with pytest.raises(ValueError, match="Unknown"):
+ path_template._expand_variable_match([], {}, match)
+
+
+def test__replace_variable_with_pattern():
+ match = mock.Mock(spec=["group"])
+ match.group.return_value = None
+ with pytest.raises(ValueError, match="Unknown"):
+ path_template._replace_variable_with_pattern(match)
+
+
+@pytest.mark.parametrize(
+ "http_options, request_kwargs, expected_result",
+ [
+ [
+ [["get", "/v1/no/template", ""]],
+ {"foo": "bar"},
+ ["get", "/v1/no/template", {}, {"foo": "bar"}],
+ ],
+ # Single templates
+ [
+ [["get", "/v1/{field}", ""]],
+ {"field": "parent"},
+ ["get", "/v1/parent", {}, {}],
+ ],
+ [
+ [["get", "/v1/{field.sub}", ""]],
+ {"field": {"sub": "parent"}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
+ ],
+ ],
+)
+def test_transcode_base_case(http_options, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, request_kwargs, expected_result",
+ [
+ [
+ [["get", "/v1/{field.subfield}", ""]],
+ {"field": {"subfield": "parent"}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{field.subfield.subsubfield}", ""]],
+ {"field": {"subfield": {"subsubfield": "parent"}}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {"subfield": {}}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{field.subfield1}/{field.subfield2}", ""]],
+ {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
+ ["get", "/v1/parent/child", {}, {"field": {}, "foo": "bar"}],
+ ],
+ ],
+)
+def test_transcode_subfields(http_options, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, request_kwargs, expected_result",
+ [
+ # Single segment wildcard
+ [
+ [["get", "/v1/{field=*}", ""]],
+ {"field": "parent"},
+ ["get", "/v1/parent", {}, {}],
+ ],
+ [
+ [["get", "/v1/{field=a/*/b/*}", ""]],
+ {"field": "a/parent/b/child", "foo": "bar"},
+ ["get", "/v1/a/parent/b/child", {}, {"foo": "bar"}],
+ ],
+ # Double segment wildcard
+ [
+ [["get", "/v1/{field=**}", ""]],
+ {"field": "parent/p1"},
+ ["get", "/v1/parent/p1", {}, {}],
+ ],
+ [
+ [["get", "/v1/{field=a/**/b/**}", ""]],
+ {"field": "a/parent/p1/b/child/c1", "foo": "bar"},
+ ["get", "/v1/a/parent/p1/b/child/c1", {}, {"foo": "bar"}],
+ ],
+ # Combined single and double segment wildcard
+ [
+ [["get", "/v1/{field=a/*/b/**}", ""]],
+ {"field": "a/parent/b/child/c1"},
+ ["get", "/v1/a/parent/b/child/c1", {}, {}],
+ ],
+ [
+ [["get", "/v1/{field=a/**/b/*}/v2/{name}", ""]],
+ {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
+ ["get", "/v1/a/parent/p1/b/child/v2/first", {}, {"foo": "bar"}],
+ ],
+ ],
+)
+def test_transcode_with_wildcard(http_options, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, request_kwargs, expected_result",
+ [
+ # Single field body
+ [
+ [["post", "/v1/no/template", "data"]],
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ ["post", "/v1/no/template", {"id": 1, "info": "some info"}, {"foo": "bar"}],
+ ],
+ [
+ [["post", "/v1/{field=a/*}/b/{name=**}", "data"]],
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"id": 1, "info": "some info"},
+ {"foo": "bar"},
+ ],
+ ],
+ # Wildcard body
+ [
+ [["post", "/v1/{field=a/*}/b/{name=**}", "*"]],
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ {},
+ ],
+ ],
+ ],
+)
+def test_transcode_with_body(http_options, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, request_kwargs, expected_result",
+ [
+ # Additional bindings
+ [
+ [
+ ["post", "/v1/{field=a/*}/b/{name=**}", "extra_data"],
+ ["post", "/v1/{field=a/*}/b/{name=**}", "*"],
+ ],
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ {},
+ ],
+ ],
+ [
+ [
+ ["get", "/v1/{field=a/*}/b/{name=**}", ""],
+ ["get", "/v1/{field=a/*}/b/first/last", ""],
+ ],
+ {"field": "a/parent", "foo": "bar"},
+ ["get", "/v1/a/parent/b/first/last", {}, {"foo": "bar"}],
+ ],
+ ],
+)
+def test_transcode_with_additional_bindings(
+ http_options, request_kwargs, expected_result
+):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, request_kwargs",
+ [
+ [[["get", "/v1/{name}", ""]], {"foo": "bar"}],
+ [[["get", "/v1/{name}", ""]], {"name": "first/last"}],
+ [[["get", "/v1/{name=mr/*/*}", ""]], {"name": "first/last"}],
+ [[["post", "/v1/{name}", "data"]], {"name": "first/last"}],
+ ],
+)
+def test_transcode_fails(http_options, request_kwargs):
+ http_options, _ = helper_test_transcode(http_options, range(4))
+ with pytest.raises(ValueError):
+ path_template.transcode(http_options, **request_kwargs)
+
+
+def helper_test_transcode(http_options_list, expected_result_list):
+ http_options = []
+ for opt_list in http_options_list:
+ http_option = {"method": opt_list[0], "uri": opt_list[1]}
+ if opt_list[2]:
+ http_option["body"] = opt_list[2]
+ http_options.append(http_option)
+
+ expected_result = {
+ "method": expected_result_list[0],
+ "uri": expected_result_list[1],
+ "query_params": expected_result_list[3],
+ }
+ if expected_result_list[2]:
+ expected_result["body"] = expected_result_list[2]
+
+ return (http_options, expected_result)
diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py
new file mode 100644
index 0000000..3df45df
--- /dev/null
+++ b/tests/unit/test_protobuf_helpers.py
@@ -0,0 +1,518 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+import pytest
+
+from google.api import http_pb2
+from google.api_core import protobuf_helpers
+from google.longrunning import operations_pb2
+from google.protobuf import any_pb2
+from google.protobuf import message
+from google.protobuf import source_context_pb2
+from google.protobuf import struct_pb2
+from google.protobuf import timestamp_pb2
+from google.protobuf import type_pb2
+from google.protobuf import wrappers_pb2
+from google.type import color_pb2
+from google.type import date_pb2
+from google.type import timeofday_pb2
+
+
+def test_from_any_pb_success():
+ in_message = date_pb2.Date(year=1990)
+ in_message_any = any_pb2.Any()
+ in_message_any.Pack(in_message)
+ out_message = protobuf_helpers.from_any_pb(date_pb2.Date, in_message_any)
+
+ assert in_message == out_message
+
+
+def test_from_any_pb_wrapped_success():
+ # Declare a message class conforming to wrapped messages.
+ class WrappedDate(object):
+ def __init__(self, **kwargs):
+ self._pb = date_pb2.Date(**kwargs)
+
+ def __eq__(self, other):
+ return self._pb == other
+
+ @classmethod
+ def pb(cls, msg):
+ return msg._pb
+
+ # Run the same test as `test_from_any_pb_success`, but using the
+ # wrapped class.
+ in_message = date_pb2.Date(year=1990)
+ in_message_any = any_pb2.Any()
+ in_message_any.Pack(in_message)
+ out_message = protobuf_helpers.from_any_pb(WrappedDate, in_message_any)
+
+ assert out_message == in_message
+
+
+def test_from_any_pb_failure():
+ in_message = any_pb2.Any()
+ in_message.Pack(date_pb2.Date(year=1990))
+
+ with pytest.raises(TypeError):
+ protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message)
+
+
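+# Editorial sketch (hedged, not the library's actual code): the
+# unpack-or-TypeError behavior tested above. Wrapped (proto-plus style)
+# classes are detected through their `pb` hook.
+def _sketch_from_any_pb(pb_type, any_pb):
+    msg = pb_type()
+    # Unwrap the underlying protobuf message if this is a wrapper class.
+    msg_pb = pb_type.pb(msg) if hasattr(pb_type, "pb") else msg
+    if not any_pb.Unpack(msg_pb):
+        raise TypeError("Could not unpack Any into {}".format(pb_type.__name__))
+    return msg
+
+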
+def test_check_protobuf_helpers_ok():
+ assert protobuf_helpers.check_oneof() is None
+ assert protobuf_helpers.check_oneof(foo="bar") is None
+ assert protobuf_helpers.check_oneof(foo="bar", baz=None) is None
+ assert protobuf_helpers.check_oneof(foo=None, baz="bacon") is None
+ assert protobuf_helpers.check_oneof(foo="bar", spam=None, eggs=None) is None
+
+
+def test_check_protobuf_helpers_failures():
+ with pytest.raises(ValueError):
+ protobuf_helpers.check_oneof(foo="bar", spam="eggs")
+ with pytest.raises(ValueError):
+ protobuf_helpers.check_oneof(foo="bar", baz="bacon", spam="eggs")
+ with pytest.raises(ValueError):
+ protobuf_helpers.check_oneof(foo="bar", spam=0, eggs=None)
+
+
+def test_get_messages():
+ answer = protobuf_helpers.get_messages(date_pb2)
+
+ # Ensure that Date was exported properly.
+ assert answer["Date"] is date_pb2.Date
+
+ # Ensure that no non-Message objects were exported.
+ for value in answer.values():
+ assert issubclass(value, message.Message)
+
+
+def test_get_dict_absent():
+ with pytest.raises(KeyError):
+ assert protobuf_helpers.get({}, "foo")
+
+
+def test_get_dict_present():
+ assert protobuf_helpers.get({"foo": "bar"}, "foo") == "bar"
+
+
+def test_get_dict_default():
+ assert protobuf_helpers.get({}, "foo", default="bar") == "bar"
+
+
+def test_get_dict_nested():
+ assert protobuf_helpers.get({"foo": {"bar": "baz"}}, "foo.bar") == "baz"
+
+
+def test_get_dict_nested_default():
+ assert protobuf_helpers.get({}, "foo.baz", default="bacon") == "bacon"
+ assert protobuf_helpers.get({"foo": {}}, "foo.baz", default="bacon") == "bacon"
+
+
+def test_get_msg_sentinel():
+ msg = timestamp_pb2.Timestamp()
+ with pytest.raises(KeyError):
+ assert protobuf_helpers.get(msg, "foo")
+
+
+def test_get_msg_present():
+ msg = timestamp_pb2.Timestamp(seconds=42)
+ assert protobuf_helpers.get(msg, "seconds") == 42
+
+
+def test_get_msg_default():
+ msg = timestamp_pb2.Timestamp()
+ assert protobuf_helpers.get(msg, "foo", default="bar") == "bar"
+
+
+def test_invalid_object():
+ with pytest.raises(TypeError):
+ protobuf_helpers.get(object(), "foo", "bar")
+
+
+def test_set_dict():
+ mapping = {}
+ protobuf_helpers.set(mapping, "foo", "bar")
+ assert mapping == {"foo": "bar"}
+
+
+def test_set_msg():
+ msg = timestamp_pb2.Timestamp()
+ protobuf_helpers.set(msg, "seconds", 42)
+ assert msg.seconds == 42
+
+
+def test_set_dict_nested():
+ mapping = {}
+ protobuf_helpers.set(mapping, "foo.bar", "baz")
+ assert mapping == {"foo": {"bar": "baz"}}
+
+
+def test_set_invalid_object():
+ with pytest.raises(TypeError):
+ protobuf_helpers.set(object(), "foo", "bar")
+
+
+def test_set_list():
+ list_ops_response = operations_pb2.ListOperationsResponse()
+
+ protobuf_helpers.set(
+ list_ops_response,
+ "operations",
+ [{"name": "foo"}, operations_pb2.Operation(name="bar")],
+ )
+
+ assert len(list_ops_response.operations) == 2
+
+ for operation in list_ops_response.operations:
+ assert isinstance(operation, operations_pb2.Operation)
+
+ assert list_ops_response.operations[0].name == "foo"
+ assert list_ops_response.operations[1].name == "bar"
+
+
+def test_set_list_clear_existing():
+ list_ops_response = operations_pb2.ListOperationsResponse(
+ operations=[{"name": "baz"}]
+ )
+
+ protobuf_helpers.set(
+ list_ops_response,
+ "operations",
+ [{"name": "foo"}, operations_pb2.Operation(name="bar")],
+ )
+
+ assert len(list_ops_response.operations) == 2
+ for operation in list_ops_response.operations:
+ assert isinstance(operation, operations_pb2.Operation)
+ assert list_ops_response.operations[0].name == "foo"
+ assert list_ops_response.operations[1].name == "bar"
+
+
+def test_set_msg_with_msg_field():
+ rule = http_pb2.HttpRule()
+ pattern = http_pb2.CustomHttpPattern(kind="foo", path="bar")
+
+ protobuf_helpers.set(rule, "custom", pattern)
+
+ assert rule.custom.kind == "foo"
+ assert rule.custom.path == "bar"
+
+
+def test_set_msg_with_dict_field():
+ rule = http_pb2.HttpRule()
+ pattern = {"kind": "foo", "path": "bar"}
+
+ protobuf_helpers.set(rule, "custom", pattern)
+
+ assert rule.custom.kind == "foo"
+ assert rule.custom.path == "bar"
+
+
+def test_set_msg_nested_key():
+ rule = http_pb2.HttpRule(custom=http_pb2.CustomHttpPattern(kind="foo", path="bar"))
+
+ protobuf_helpers.set(rule, "custom.kind", "baz")
+
+ assert rule.custom.kind == "baz"
+ assert rule.custom.path == "bar"
+
+
+def test_setdefault_dict_unset():
+ mapping = {}
+ protobuf_helpers.setdefault(mapping, "foo", "bar")
+ assert mapping == {"foo": "bar"}
+
+
+def test_setdefault_dict_falsy():
+ mapping = {"foo": None}
+ protobuf_helpers.setdefault(mapping, "foo", "bar")
+ assert mapping == {"foo": "bar"}
+
+
+def test_setdefault_dict_truthy():
+ mapping = {"foo": "bar"}
+ protobuf_helpers.setdefault(mapping, "foo", "baz")
+ assert mapping == {"foo": "bar"}
+
+
+def test_setdefault_pb2_falsy():
+ operation = operations_pb2.Operation()
+ protobuf_helpers.setdefault(operation, "name", "foo")
+ assert operation.name == "foo"
+
+
+def test_setdefault_pb2_truthy():
+ operation = operations_pb2.Operation(name="bar")
+ protobuf_helpers.setdefault(operation, "name", "foo")
+ assert operation.name == "bar"
+
+
+def test_field_mask_invalid_args():
+ with pytest.raises(ValueError):
+ protobuf_helpers.field_mask("foo", any_pb2.Any())
+ with pytest.raises(ValueError):
+ protobuf_helpers.field_mask(any_pb2.Any(), "bar")
+ with pytest.raises(ValueError):
+ protobuf_helpers.field_mask(any_pb2.Any(), operations_pb2.Operation())
+
+
+def test_field_mask_equal_values():
+ assert protobuf_helpers.field_mask(None, None).paths == []
+
+ original = struct_pb2.Value(number_value=1.0)
+ modified = struct_pb2.Value(number_value=1.0)
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0)])
+ modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0)])
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
+ modified = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+
+def test_field_mask_zero_values():
+ # Singular Values
+ original = color_pb2.Color(red=0.0)
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = None
+ modified = color_pb2.Color(red=0.0)
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ # Repeated Values
+ original = struct_pb2.ListValue(values=[])
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = None
+ modified = struct_pb2.ListValue(values=[])
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ # Maps
+ original = struct_pb2.Struct(fields={})
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = None
+ modified = struct_pb2.Struct(fields={})
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ # Oneofs
+ original = struct_pb2.Value(number_value=0.0)
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+ original = None
+ modified = struct_pb2.Value(number_value=0.0)
+ assert protobuf_helpers.field_mask(original, modified).paths == []
+
+
+def test_field_mask_singular_field_diffs():
+ original = type_pb2.Type(name="name")
+ modified = type_pb2.Type()
+ assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
+
+ original = type_pb2.Type(name="name")
+ modified = type_pb2.Type()
+ assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
+
+ original = None
+ modified = type_pb2.Type(name="name")
+ assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
+
+ original = type_pb2.Type(name="name")
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
+
+
+def test_field_mask_message_diffs():
+ original = type_pb2.Type()
+ modified = type_pb2.Type(
+ source_context=source_context_pb2.SourceContext(file_name="name")
+ )
+ assert protobuf_helpers.field_mask(original, modified).paths == [
+ "source_context.file_name"
+ ]
+
+ original = type_pb2.Type(
+ source_context=source_context_pb2.SourceContext(file_name="name")
+ )
+ modified = type_pb2.Type()
+ assert protobuf_helpers.field_mask(original, modified).paths == ["source_context"]
+
+ original = type_pb2.Type(
+ source_context=source_context_pb2.SourceContext(file_name="name")
+ )
+ modified = type_pb2.Type(
+ source_context=source_context_pb2.SourceContext(file_name="other_name")
+ )
+ assert protobuf_helpers.field_mask(original, modified).paths == [
+ "source_context.file_name"
+ ]
+
+ original = None
+ modified = type_pb2.Type(
+ source_context=source_context_pb2.SourceContext(file_name="name")
+ )
+ assert protobuf_helpers.field_mask(original, modified).paths == [
+ "source_context.file_name"
+ ]
+
+ original = type_pb2.Type(
+ source_context=source_context_pb2.SourceContext(file_name="name")
+ )
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == ["source_context"]
+
+
+def test_field_mask_wrapper_type_diffs():
+ original = color_pb2.Color()
+ modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
+
+ original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ modified = color_pb2.Color()
+ assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
+
+ original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0))
+ assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
+
+ original = None
+ modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0))
+ assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
+
+ original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
+
+
+def test_field_mask_repeated_diffs():
+ original = struct_pb2.ListValue()
+ modified = struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+ )
+ assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
+
+ original = struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+ )
+ modified = struct_pb2.ListValue()
+ assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
+
+ original = None
+ modified = struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+ )
+ assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
+
+ original = struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+ )
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
+
+ original = struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+ )
+ modified = struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=2.0), struct_pb2.Value(number_value=1.0)]
+ )
+ assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
+
+
+def test_field_mask_map_diffs():
+ original = struct_pb2.Struct()
+ modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+ assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
+
+ original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+ modified = struct_pb2.Struct()
+ assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
+
+ original = None
+ modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+ assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
+
+ original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+ modified = None
+ assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
+
+ original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+ modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=2.0)})
+ assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
+
+ original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+ modified = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
+ assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
+
+
+def test_field_mask_different_level_diffs():
+ original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
+ modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0), red=1.0)
+ assert sorted(protobuf_helpers.field_mask(original, modified).paths) == [
+ "alpha",
+ "red",
+ ]
+
+
+@pytest.mark.skipif(
+ sys.version_info.major == 2,
+ reason="Field names with trailing underscores can only be created"
+ "through proto-plus, which is Python 3 only.",
+)
+def test_field_mask_ignore_trailing_underscore():
+ import proto
+
+ class Foo(proto.Message):
+ type_ = proto.Field(proto.STRING, number=1)
+ input_config = proto.Field(proto.STRING, number=2)
+
+ modified = Foo(type_="bar", input_config="baz")
+
+ assert sorted(protobuf_helpers.field_mask(None, Foo.pb(modified)).paths) == [
+ "input_config",
+ "type",
+ ]
+
+
+@pytest.mark.skipif(
+ sys.version_info.major == 2,
+ reason="Field names with trailing underscores can only be created"
+ "through proto-plus, which is Python 3 only.",
+)
+def test_field_mask_ignore_trailing_underscore_with_nesting():
+ import proto
+
+ class Bar(proto.Message):
+ class Baz(proto.Message):
+ input_config = proto.Field(proto.STRING, number=1)
+
+ type_ = proto.Field(Baz, number=1)
+
+ modified = Bar()
+ modified.type_.input_config = "foo"
+
+ assert sorted(protobuf_helpers.field_mask(None, Bar.pb(modified)).paths) == [
+ "type.input_config",
+ ]
diff --git a/tests/unit/test_rest_helpers.py b/tests/unit/test_rest_helpers.py
new file mode 100644
index 0000000..5932fa5
--- /dev/null
+++ b/tests/unit/test_rest_helpers.py
@@ -0,0 +1,77 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import rest_helpers
+
+
+def test_flatten_simple_value():
+ with pytest.raises(TypeError):
+ rest_helpers.flatten_query_params("abc")
+
+
+def test_flatten_list():
+ with pytest.raises(TypeError):
+ rest_helpers.flatten_query_params(["abc", "def"])
+
+
+def test_flatten_none():
+ assert rest_helpers.flatten_query_params(None) == []
+
+
+def test_flatten_empty_dict():
+ assert rest_helpers.flatten_query_params({}) == []
+
+
+def test_flatten_simple_dict():
+ assert rest_helpers.flatten_query_params({"a": "abc", "b": "def"}) == [
+ ("a", "abc"),
+ ("b", "def"),
+ ]
+
+
+def test_flatten_repeated_field():
+ assert rest_helpers.flatten_query_params({"a": ["x", "y", "z", None]}) == [
+ ("a", "x"),
+ ("a", "y"),
+ ("a", "z"),
+ ]
+
+
+def test_flatten_nested_dict():
+ obj = {"a": {"b": {"c": ["x", "y", "z"]}}, "d": {"e": "uvw"}}
+ expected_result = [("a.b.c", "x"), ("a.b.c", "y"), ("a.b.c", "z"), ("d.e", "uvw")]
+
+ assert rest_helpers.flatten_query_params(obj) == expected_result
+
+
+def test_flatten_repeated_dict():
+ obj = {
+ "a": {"b": {"c": [{"v": 1}, {"v": 2}]}},
+ "d": "uvw",
+ }
+
+ with pytest.raises(ValueError):
+ rest_helpers.flatten_query_params(obj)
+
+
+def test_flatten_repeated_list():
+ obj = {
+ "a": {"b": {"c": [["e", "f"], ["g", "h"]]}},
+ "d": "uvw",
+ }
+
+ with pytest.raises(ValueError):
+ rest_helpers.flatten_query_params(obj)
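+
+
+# Editorial sketch (hedged, not the library's actual code): the flattening
+# contract the tests above pin down -- dicts recurse into dotted keys, lists
+# repeat the key per element (dropping Nones), and containers nested inside
+# a list are rejected.
+def _sketch_flatten_query_params(obj):
+    if obj is None:
+        return []
+    if not isinstance(obj, dict):
+        raise TypeError("flatten expects a dict or None, got {!r}".format(obj))
+
+    def _flatten(value, key):
+        if isinstance(value, dict):
+            pairs = []
+            for sub_key, sub_value in value.items():
+                dotted = "{}.{}".format(key, sub_key) if key else sub_key
+                pairs.extend(_flatten(sub_value, dotted))
+            return pairs
+        if isinstance(value, list):
+            if any(isinstance(item, (dict, list)) for item in value):
+                raise ValueError("nested containers in repeated field: " + key)
+            return [(key, item) for item in value if item is not None]
+        return [(key, value)]
+
+    return _flatten(obj, "")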
diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py
new file mode 100644
index 0000000..199ca55
--- /dev/null
+++ b/tests/unit/test_retry.py
@@ -0,0 +1,458 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import itertools
+import re
+
+import mock
+import pytest
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.auth import exceptions as auth_exceptions
+
+
+def test_if_exception_type():
+ predicate = retry.if_exception_type(ValueError)
+
+ assert predicate(ValueError())
+ assert not predicate(TypeError())
+
+
+def test_if_exception_type_multiple():
+ predicate = retry.if_exception_type(ValueError, TypeError)
+
+ assert predicate(ValueError())
+ assert predicate(TypeError())
+ assert not predicate(RuntimeError())
+
+
+def test_if_transient_error():
+ assert retry.if_transient_error(exceptions.InternalServerError(""))
+ assert retry.if_transient_error(exceptions.TooManyRequests(""))
+ assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
+ assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
+ assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
+ assert retry.if_transient_error(auth_exceptions.TransportError(""))
+ assert not retry.if_transient_error(exceptions.InvalidArgument(""))
+
+
+# Make uniform return half of its maximum, which will be the calculated
+# sleep time.
+@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+def test_exponential_sleep_generator_base_2(uniform):
+ gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
+
+ result = list(itertools.islice(gen, 8))
+ assert result == [1, 2, 4, 8, 16, 32, 60, 60]
+
+
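+# Editorial sketch (hedged): backoff with jitter consistent with the test
+# above -- uniform(0, 2 * delay) averages out to `delay`, so patching
+# uniform to return half its upper bound recovers the bare exponential
+# series, capped at `maximum`.
+def _sketch_exponential_sleep_generator(initial, maximum, multiplier=2.0):
+    import random  # local import keeps the sketch self-contained
+
+    delay = initial
+    while True:
+        yield min(random.uniform(0.0, delay * 2.0), maximum)
+        delay = delay * multiplier
+
+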
+@mock.patch("time.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+def test_retry_target_success(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ call_count = [0]
+
+ def target():
+ call_count[0] += 1
+ if call_count[0] < 3:
+ raise ValueError()
+ return 42
+
+ result = retry.retry_target(target, predicate, range(10), None)
+
+ assert result == 42
+ assert call_count[0] == 3
+ sleep.assert_has_calls([mock.call(0), mock.call(1)])
+
+
+@mock.patch("time.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+def test_retry_target_w_on_error(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ call_count = {"target": 0}
+ to_raise = ValueError()
+
+ def target():
+ call_count["target"] += 1
+ if call_count["target"] < 3:
+ raise to_raise
+ return 42
+
+ on_error = mock.Mock()
+
+ result = retry.retry_target(target, predicate, range(10), None, on_error=on_error)
+
+ assert result == 42
+ assert call_count["target"] == 3
+
+ on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
+ sleep.assert_has_calls([mock.call(0), mock.call(1)])
+
+
+@mock.patch("time.sleep", autospec=True)
+@mock.patch(
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+def test_retry_target_non_retryable_error(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ exception = TypeError()
+ target = mock.Mock(side_effect=exception)
+
+ with pytest.raises(TypeError) as exc_info:
+ retry.retry_target(target, predicate, range(10), None)
+
+ assert exc_info.value == exception
+ sleep.assert_not_called()
+
+
+@mock.patch("time.sleep", autospec=True)
+@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
+def test_retry_target_deadline_exceeded(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ exception = ValueError("meep")
+ target = mock.Mock(side_effect=exception)
+ # Set up the timeline so that the first call takes 5 seconds but the second
+ # call takes 6, which puts the retry over the deadline.
+ utcnow.side_effect = [
+ # The first call to utcnow establishes the start of the timeline.
+ datetime.datetime.min,
+ datetime.datetime.min + datetime.timedelta(seconds=5),
+ datetime.datetime.min + datetime.timedelta(seconds=11),
+ ]
+
+ with pytest.raises(exceptions.RetryError) as exc_info:
+ retry.retry_target(target, predicate, range(10), deadline=10)
+
+ assert exc_info.value.cause == exception
+ assert exc_info.match("Deadline of 10.0s exceeded")
+ assert exc_info.match("last exception: meep")
+ assert target.call_count == 2
+
+
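+# Editorial sketch (hedged, names illustrative): the control flow the
+# retry_target tests above exercise. The real function also shortens the
+# final sleep so the deadline is honored exactly (see
+# test___call___and_execute_retry_hitting_deadline below); that refinement
+# is omitted here.
+def _sketch_retry_target(target, predicate, sleep_generator, deadline, on_error=None):
+    import time  # local import keeps the sketch self-contained
+
+    from google.api_core import datetime_helpers
+
+    start = datetime_helpers.utcnow()
+    last_exc = None
+    for sleep in sleep_generator:
+        try:
+            return target()
+        except Exception as exc:
+            if not predicate(exc):
+                raise  # non-retryable errors propagate immediately
+            last_exc = exc
+            if on_error is not None:
+                on_error(exc)
+        if deadline is not None:
+            elapsed = (datetime_helpers.utcnow() - start).total_seconds()
+            if elapsed > deadline:
+                raise exceptions.RetryError(
+                    "Deadline of {:.1f}s exceeded while calling target function, "
+                    "last exception: {}".format(deadline, last_exc),
+                    last_exc,
+                )
+        time.sleep(sleep)
+    raise ValueError("Sleep generator stopped yielding sleep values.")
+
+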
+def test_retry_target_bad_sleep_generator():
+ with pytest.raises(ValueError, match="Sleep generator"):
+ retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
+
+
+class TestRetry(object):
+ def test_constructor_defaults(self):
+ retry_ = retry.Retry()
+ assert retry_._predicate == retry.if_transient_error
+ assert retry_._initial == 1
+ assert retry_._maximum == 60
+ assert retry_._multiplier == 2
+ assert retry_._deadline == 120
+ assert retry_._on_error is None
+ assert retry_.deadline == 120
+
+ def test_constructor_options(self):
+ _some_function = mock.Mock()
+
+ retry_ = retry.Retry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=_some_function,
+ )
+ assert retry_._predicate == mock.sentinel.predicate
+ assert retry_._initial == 1
+ assert retry_._maximum == 2
+ assert retry_._multiplier == 3
+ assert retry_._deadline == 4
+ assert retry_._on_error is _some_function
+
+ def test_with_deadline(self):
+ retry_ = retry.Retry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_deadline(42)
+ assert retry_ is not new_retry
+ assert new_retry._deadline == 42
+
+ # the rest of the attributes should remain the same
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_predicate(self):
+ retry_ = retry.Retry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_predicate(mock.sentinel.predicate)
+ assert retry_ is not new_retry
+ assert new_retry._predicate == mock.sentinel.predicate
+
+ # the rest of the attributes should remain the same
+ assert new_retry._deadline == retry_._deadline
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_noop(self):
+ retry_ = retry.Retry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay()
+ assert retry_ is not new_retry
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+
+ def test_with_delay(self):
+ retry_ = retry.Retry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 5
+ assert new_retry._maximum == 6
+ assert new_retry._multiplier == 7
+
+ # the rest of the attributes should remain the same
+ assert new_retry._deadline == retry_._deadline
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_partial_options(self):
+ retry_ = retry.Retry(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ deadline=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(initial=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 4
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 3
+
+ new_retry = retry_.with_delay(maximum=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 4
+ assert new_retry._multiplier == 3
+
+ new_retry = retry_.with_delay(multiplier=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 4
+
+ # the rest of the attributes should remain the same
+ assert new_retry._deadline == retry_._deadline
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes, so that changes to the Retry defaults
+ # do not cause this test to start failing.
+ retry_ = retry.Retry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ deadline=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<Retry predicate=<function.*?if_exception_type.*?>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___and_execute_success(self, sleep):
+ retry_ = retry.Retry()
+ target = mock.Mock(spec=["__call__"], return_value=42)
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target)
+ target.assert_not_called()
+
+ result = decorated("meep")
+
+ assert result == 42
+ target.assert_called_once_with("meep")
+ sleep.assert_not_called()
+
+ # Make uniform return half of its maximum, which is the calculated sleep time.
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___and_execute_retry(self, sleep, uniform):
+
+ on_error = mock.Mock(spec=["__call__"], side_effect=[None])
+ retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError))
+
+ target = mock.Mock(spec=["__call__"], side_effect=[ValueError(), 42])
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target, on_error=on_error)
+ target.assert_not_called()
+
+ result = decorated("meep")
+
+ assert result == 42
+ assert target.call_count == 2
+ target.assert_has_calls([mock.call("meep"), mock.call("meep")])
+ sleep.assert_called_once_with(retry_._initial)
+ assert on_error.call_count == 1
+
+ # Make uniform return half of its maximum, which is the calculated sleep time.
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
+
+ on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
+ retry_ = retry.Retry(
+ predicate=retry.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ deadline=9.9,
+ )
+
+ utcnow = datetime.datetime.utcnow()
+ utcnow_patcher = mock.patch(
+ "google.api_core.datetime_helpers.utcnow", return_value=utcnow
+ )
+
+ target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10)
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target, on_error=on_error)
+ target.assert_not_called()
+
+ with utcnow_patcher as patched_utcnow:
+ # Make sure that calls to the fake time.sleep() also advance the
+ # mocked clock.
+ def increase_time(sleep_delay):
+ patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
+
+ sleep.side_effect = increase_time
+
+ with pytest.raises(exceptions.RetryError):
+ decorated("meep")
+
+ assert target.call_count == 5
+ target.assert_has_calls([mock.call("meep")] * 5)
+ assert on_error.call_count == 5
+
+ # check the delays
+ assert sleep.call_count == 4  # one sleep between each pair of successive target calls
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+
+ assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
+ assert total_wait == 9.9 # the same as the deadline
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___init___without_retry_executed(self, sleep):
+ _some_function = mock.Mock()
+
+ retry_ = retry.Retry(
+ predicate=retry.if_exception_type(ValueError), on_error=_some_function
+ )
+ # check the proper creation of the class
+ assert retry_._on_error is _some_function
+
+ target = mock.Mock(spec=["__call__"], side_effect=[42])
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ wrapped = retry_(target)
+
+ result = wrapped("meep")
+
+ assert result == 42
+ target.assert_called_once_with("meep")
+ sleep.assert_not_called()
+ _some_function.assert_not_called()
+
+ # Make uniform return half of its maximum, which is the calculated sleep time.
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("time.sleep", autospec=True)
+ def test___init___when_retry_is_executed(self, sleep, uniform):
+ _some_function = mock.Mock()
+
+ retry_ = retry.Retry(
+ predicate=retry.if_exception_type(ValueError), on_error=_some_function
+ )
+ # check the proper creation of the class
+ assert retry_._on_error is _some_function
+
+ target = mock.Mock(
+ spec=["__call__"], side_effect=[ValueError(), ValueError(), 42]
+ )
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ wrapped = retry_(target)
+ target.assert_not_called()
+
+ result = wrapped("meep")
+
+ assert result == 42
+ assert target.call_count == 3
+ assert _some_function.call_count == 2
+ target.assert_has_calls([mock.call("meep"), mock.call("meep")])
+ sleep.assert_any_call(retry_._initial)
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
new file mode 100644
index 0000000..30d624e
--- /dev/null
+++ b/tests/unit/test_timeout.py
@@ -0,0 +1,129 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import itertools
+
+import mock
+
+from google.api_core import timeout
+
+
+def test__exponential_timeout_generator_base_2():
+ gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
+
+ result = list(itertools.islice(gen, 8))
+ assert result == [1, 2, 4, 8, 16, 32, 60, 60]
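The expected sequence is exponential growth capped at the maximum, roughly min(initial * multiplier**n, maximum) for n = 0, 1, 2, ...; a sketch of that arithmetic under the arguments above (not the generator's internals):

    initial, maximum, multiplier = 1.0, 60.0, 2.0
    expected = [min(initial * multiplier ** n, maximum) for n in range(8)]
    assert expected == [1, 2, 4, 8, 16, 32, 60, 60]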
+
+
+@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
+def test__exponential_timeout_generator_base_deadline(utcnow):
+ # Make each successive call to utcnow() advance one second.
+ utcnow.side_effect = [
+ datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15)
+ ]
+
+ gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
+
+ result = list(itertools.islice(gen, 14))
+ # Should grow until the exponential value exceeds the time remaining, then
+ # decrease by one each step as the mocked clock approaches the 30s deadline.
+ assert result == [1, 2, 4, 8, 16, 24, 23, 22, 21, 20, 19, 18, 17, 16]
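Since one mocked second elapses per yield, the n-th value is additionally capped by the 30 - n seconds remaining before the deadline, which is why the sequence tops out at 24 and then counts down. A sketch of the same arithmetic:

    initial, maximum, multiplier, deadline = 1.0, 60.0, 2.0, 30.0
    expected = [
        min(initial * multiplier ** (n - 1), maximum, deadline - n)
        for n in range(1, 15)
    ]
    assert expected == [1, 2, 4, 8, 16, 24, 23, 22, 21, 20, 19, 18, 17, 16]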
+
+
+class TestConstantTimeout(object):
+ def test_constructor(self):
+ timeout_ = timeout.ConstantTimeout()
+ assert timeout_._timeout is None
+
+ def test_constructor_args(self):
+ timeout_ = timeout.ConstantTimeout(42.0)
+ assert timeout_._timeout == 42.0
+
+ def test___str__(self):
+ timeout_ = timeout.ConstantTimeout(1)
+ assert str(timeout_) == "<ConstantTimeout timeout=1.0>"
+
+ def test_apply(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+ timeout_ = timeout.ConstantTimeout(42.0)
+ wrapped = timeout_(target)
+
+ wrapped()
+
+ target.assert_called_once_with(timeout=42.0)
+
+ def test_apply_passthrough(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+ timeout_ = timeout.ConstantTimeout(42.0)
+ wrapped = timeout_(target)
+
+ wrapped(1, 2, meep="moop")
+
+ target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
+
+
+class TestExponentialTimeout(object):
+ def test_constructor(self):
+ timeout_ = timeout.ExponentialTimeout()
+ assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
+ assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT
+ assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER
+ assert timeout_._deadline == timeout._DEFAULT_DEADLINE
+
+ def test_constructor_args(self):
+ timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
+ assert timeout_._initial == 1
+ assert timeout_._maximum == 2
+ assert timeout_._multiplier == 3
+ assert timeout_._deadline == 4
+
+ def test_with_deadline(self):
+ original_timeout = timeout.ExponentialTimeout()
+ timeout_ = original_timeout.with_deadline(42)
+ assert original_timeout is not timeout_
+ assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
+ assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT
+ assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER
+ assert timeout_._deadline == 42
+
+ def test___str__(self):
+ timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
+ assert str(timeout_) == (
+ "<ExponentialTimeout initial=1.0, maximum=2.0, multiplier=3.0, "
+ "deadline=4.0>"
+ )
+
+ def test_apply(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+ timeout_ = timeout.ExponentialTimeout(1, 10, 2)
+ wrapped = timeout_(target)
+
+ wrapped()
+ target.assert_called_with(timeout=1)
+
+ wrapped()
+ target.assert_called_with(timeout=2)
+
+ wrapped()
+ target.assert_called_with(timeout=4)
+
+ def test_apply_passthrough(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+ timeout_ = timeout.ExponentialTimeout(42.0, 100, 2)
+ wrapped = timeout_(target)
+
+ wrapped(1, 2, meep="moop")
+
+ target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)