diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 597e0c32..c4e82889 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455
-# created: 2024-09-16T21:04:09.091105552Z
+ digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c
+# created: 2025-03-31T16:51:40.130756953Z
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
index d4ca9418..c1b20096 100644
--- a/.github/release-trigger.yml
+++ b/.github/release-trigger.yml
@@ -1 +1,2 @@
enabled: true
+multiScmName: python-error-reporting
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 698fbc5c..2833fe98 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -12,7 +12,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
- python-version: "3.9"
+ python-version: "3.10"
- name: Install nox
run: |
python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index dd8bd769..c66b757c 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -5,10 +5,13 @@ on:
name: unittest
jobs:
unit:
- runs-on: ubuntu-latest
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+ # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+ runs-on: ubuntu-22.04
strategy:
matrix:
- python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+ python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- name: Checkout
uses: actions/checkout@v4
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 0c44d59c..d41b45aa 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,11 +15,13 @@
set -eo pipefail
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
if [[ -z "${PROJECT_ROOT:-}" ]]; then
- PROJECT_ROOT="github/python-error-reporting"
+ PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
fi
-cd "${PROJECT_ROOT}"
+pushd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
# Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+ export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
@@ -46,7 +54,7 @@ fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3 -m nox -s ${NOX_SESSION:-}
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
deleted file mode 100644
index e5410e29..00000000
--- a/.kokoro/docker/docs/Dockerfile
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ubuntu:24.04
-
-ENV DEBIAN_FRONTEND noninteractive
-
-# Ensure local Python is preferred over distribution Python.
-ENV PATH /usr/local/bin:$PATH
-
-# Install dependencies.
-RUN apt-get update \
- && apt-get install -y --no-install-recommends \
- apt-transport-https \
- build-essential \
- ca-certificates \
- curl \
- dirmngr \
- git \
- gpg-agent \
- graphviz \
- libbz2-dev \
- libdb5.3-dev \
- libexpat1-dev \
- libffi-dev \
- liblzma-dev \
- libreadline-dev \
- libsnappy-dev \
- libssl-dev \
- libsqlite3-dev \
- portaudio19-dev \
- redis-server \
- software-properties-common \
- ssh \
- sudo \
- tcl \
- tcl-dev \
- tk \
- tk-dev \
- uuid-dev \
- wget \
- zlib1g-dev \
- && add-apt-repository universe \
- && apt-get update \
- && apt-get -y install jq \
- && apt-get clean autoclean \
- && apt-get autoremove -y \
- && rm -rf /var/lib/apt/lists/* \
- && rm -f /var/cache/apt/archives/*.deb
-
-
-###################### Install python 3.10.14 for docs/docfx session
-
-# Download python 3.10.14
-RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz
-
-# Extract files
-RUN tar -xvf Python-3.10.14.tgz
-
-# Install python 3.10.14
-RUN ./Python-3.10.14/configure --enable-optimizations
-RUN make altinstall
-
-ENV PATH /usr/local/bin/python3.10:$PATH
-
-###################### Install pip
-RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
- && python3.10 /tmp/get-pip.py \
- && rm /tmp/get-pip.py
-
-# Test pip
-RUN python3.10 -m pip
-
-# Install build requirements
-COPY requirements.txt /requirements.txt
-RUN python3.10 -m pip install --require-hashes -r requirements.txt
-
-CMD ["python3.10"]
diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in
deleted file mode 100644
index 816817c6..00000000
--- a/.kokoro/docker/docs/requirements.in
+++ /dev/null
@@ -1 +0,0 @@
-nox
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
deleted file mode 100644
index 7129c771..00000000
--- a/.kokoro/docker/docs/requirements.txt
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.9
-# by the following command:
-#
-# pip-compile --allow-unsafe --generate-hashes requirements.in
-#
-argcomplete==3.4.0 \
- --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
- --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
- # via nox
-colorlog==6.8.2 \
- --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
- --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
- # via nox
-distlib==0.3.8 \
- --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
- --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
- # via virtualenv
-filelock==3.15.4 \
- --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
- --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
- # via virtualenv
-nox==2024.4.15 \
- --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
- --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
- # via -r requirements.in
-packaging==24.1 \
- --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
- --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
- # via nox
-platformdirs==4.2.2 \
- --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
- --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
- # via virtualenv
-tomli==2.0.1 \
- --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
- --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
- # via nox
-virtualenv==20.26.3 \
- --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
- --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
- # via nox
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
deleted file mode 100644
index eade26cf..00000000
--- a/.kokoro/docs/common.cfg
+++ /dev/null
@@ -1,85 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-error-reporting/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-error-reporting/.kokoro/publish-docs.sh"
-}
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "docs-staging"
-}
-
-env_vars: {
- key: "V2_STAGING_BUCKET"
- # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2`
- value: "docs-staging-v2"
-}
-
-# It will upload the docker image after successful builds.
-env_vars: {
- key: "TRAMPOLINE_IMAGE_UPLOAD"
- value: "true"
-}
-
-# It will always build the docker image.
-env_vars: {
- key: "TRAMPOLINE_DOCKERFILE"
- value: ".kokoro/docker/docs/Dockerfile"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "docuploader_service_account"
- }
- }
-}
-
-#############################################
-# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
-
-env_vars: {
- key: "PRODUCT_AREA_LABEL"
- value: "observability"
-}
-env_vars: {
- key: "PRODUCT_LABEL"
- value: "error-reporting"
-}
-env_vars: {
- key: "LANGUAGE_LABEL"
- value: "python"
-}
-
-###################################################
-
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
deleted file mode 100644
index 6d7b5186..00000000
--- a/.kokoro/docs/docs-presubmit.cfg
+++ /dev/null
@@ -1,28 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "gcloud-python-test"
-}
-
-env_vars: {
- key: "V2_STAGING_BUCKET"
- value: "gcloud-python-test"
-}
-
-# We only upload the image in the main `docs` build.
-env_vars: {
- key: "TRAMPOLINE_IMAGE_UPLOAD"
- value: "false"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-error-reporting/.kokoro/build.sh"
-}
-
-# Only run this nox session.
-env_vars: {
- key: "NOX_SESSION"
- value: "docs docfx"
-}
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
deleted file mode 100644
index 8f43917d..00000000
--- a/.kokoro/docs/docs.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
deleted file mode 100755
index 233205d5..00000000
--- a/.kokoro/publish-docs.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-export PATH="${HOME}/.local/bin:${PATH}"
-
-# Install nox
-python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt
-python3.10 -m nox --version
-
-# build docs
-nox -s docs
-
-# create metadata
-python3.10 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3.10 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3.10 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
-
-
-# docfx yaml files
-nox -s docfx
-
-# create metadata.
-python3.10 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3.10 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3.10 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
deleted file mode 100755
index 0b483792..00000000
--- a/.kokoro/release.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Start the releasetool reporter
-python3 -m pip install --require-hashes -r github/python-error-reporting/.kokoro/requirements.txt
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2")
-cd github/python-error-reporting
-python3 setup.py sdist bdist_wheel
-twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index 346d4892..00000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,69 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-error-reporting/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-error-reporting/.kokoro/release.sh"
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google-cloud-pypi-token-keystore-2"
- }
- }
-}
-
-# Tokens needed to report release status back to GitHub
-env_vars: {
- key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
-
-# Store the packages we uploaded to PyPI. That way, we have a record of exactly
-# what we published, which we can use to generate SBOMs and attestations.
-action {
- define_artifacts {
- regex: "github/python-error-reporting/**/*.tar.gz"
- strip_prefix: "github/python-error-reporting"
- }
-}
-
-
-#############################################
-# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
-
-env_vars: {
- key: "PRODUCT_AREA_LABEL"
- value: "observability"
-}
-env_vars: {
- key: "PRODUCT_LABEL"
- value: "error-reporting"
-}
-env_vars: {
- key: "LANGUAGE_LABEL"
- value: "python"
-}
-
-###################################################
-
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
deleted file mode 100644
index 8f43917d..00000000
--- a/.kokoro/release/release.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
deleted file mode 100644
index fff4d9ce..00000000
--- a/.kokoro/requirements.in
+++ /dev/null
@@ -1,11 +0,0 @@
-gcp-docuploader
-gcp-releasetool>=2 # required for compatibility with cryptography>=42.x
-importlib-metadata
-typing-extensions
-twine
-wheel
-setuptools
-nox>=2022.11.21 # required to remove dependency on py
-charset-normalizer<3
-click<8.1.0
-cryptography>=42.0.5
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
deleted file mode 100644
index 9622baf0..00000000
--- a/.kokoro/requirements.txt
+++ /dev/null
@@ -1,537 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.9
-# by the following command:
-#
-# pip-compile --allow-unsafe --generate-hashes requirements.in
-#
-argcomplete==3.4.0 \
- --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
- --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
- # via nox
-attrs==23.2.0 \
- --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
- --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
- # via gcp-releasetool
-backports-tarfile==1.2.0 \
- --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \
- --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991
- # via jaraco-context
-cachetools==5.3.3 \
- --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \
- --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105
- # via google-auth
-certifi==2024.7.4 \
- --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
- --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
- # via requests
-cffi==1.16.0 \
- --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \
- --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \
- --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
- --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
- --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
- --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \
- --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
- --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \
- --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \
- --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
- --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
- --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \
- --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
- --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
- --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
- --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
- --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \
- --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
- --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
- --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
- --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \
- --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \
- --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
- --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \
- --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \
- --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
- --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \
- --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \
- --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
- --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
- --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
- --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \
- --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
- --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
- --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
- --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \
- --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
- --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \
- --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
- --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
- --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
- --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \
- --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
- --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
- --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
- --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
- --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
- --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \
- --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \
- --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
- --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
- --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357
- # via cryptography
-charset-normalizer==2.1.1 \
- --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
- --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
- # via
- # -r requirements.in
- # requests
-click==8.0.4 \
- --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \
- --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb
- # via
- # -r requirements.in
- # gcp-docuploader
- # gcp-releasetool
-colorlog==6.8.2 \
- --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
- --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
- # via
- # gcp-docuploader
- # nox
-cryptography==42.0.8 \
- --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \
- --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \
- --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \
- --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \
- --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \
- --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \
- --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \
- --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \
- --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \
- --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \
- --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \
- --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \
- --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \
- --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \
- --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \
- --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \
- --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \
- --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \
- --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \
- --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \
- --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \
- --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \
- --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \
- --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \
- --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \
- --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \
- --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \
- --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \
- --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \
- --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \
- --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \
- --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e
- # via
- # -r requirements.in
- # gcp-releasetool
- # secretstorage
-distlib==0.3.8 \
- --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
- --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
- # via virtualenv
-docutils==0.21.2 \
- --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
- --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2
- # via readme-renderer
-filelock==3.15.4 \
- --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
- --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
- # via virtualenv
-gcp-docuploader==0.6.5 \
- --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
- --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
- # via -r requirements.in
-gcp-releasetool==2.0.1 \
- --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \
- --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62
- # via -r requirements.in
-google-api-core==2.19.1 \
- --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \
- --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd
- # via
- # google-cloud-core
- # google-cloud-storage
-google-auth==2.31.0 \
- --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \
- --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871
- # via
- # gcp-releasetool
- # google-api-core
- # google-cloud-core
- # google-cloud-storage
-google-cloud-core==2.4.1 \
- --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
- --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
- # via google-cloud-storage
-google-cloud-storage==2.17.0 \
- --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \
- --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1
- # via gcp-docuploader
-google-crc32c==1.5.0 \
- --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
- --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \
- --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \
- --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \
- --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \
- --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \
- --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \
- --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \
- --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \
- --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \
- --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \
- --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \
- --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \
- --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \
- --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \
- --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \
- --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \
- --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \
- --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \
- --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \
- --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \
- --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \
- --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \
- --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \
- --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \
- --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \
- --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \
- --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \
- --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \
- --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \
- --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \
- --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \
- --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \
- --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \
- --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \
- --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \
- --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \
- --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \
- --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \
- --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \
- --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \
- --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \
- --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \
- --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \
- --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \
- --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \
- --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \
- --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \
- --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \
- --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \
- --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \
- --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \
- --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \
- --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \
- --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \
- --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \
- --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \
- --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \
- --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \
- --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \
- --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \
- --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \
- --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \
- --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \
- --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \
- --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
- --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
- --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
- # via
- # google-cloud-storage
- # google-resumable-media
-google-resumable-media==2.7.1 \
- --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \
- --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33
- # via google-cloud-storage
-googleapis-common-protos==1.63.2 \
- --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \
- --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87
- # via google-api-core
-idna==3.7 \
- --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
- --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
- # via requests
-importlib-metadata==8.0.0 \
- --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \
- --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812
- # via
- # -r requirements.in
- # keyring
- # twine
-jaraco-classes==3.4.0 \
- --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \
- --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790
- # via keyring
-jaraco-context==5.3.0 \
- --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \
- --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2
- # via keyring
-jaraco-functools==4.0.1 \
- --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \
- --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8
- # via keyring
-jeepney==0.8.0 \
- --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
- --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
- # via
- # keyring
- # secretstorage
-jinja2==3.1.4 \
- --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
- --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
- # via gcp-releasetool
-keyring==25.2.1 \
- --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \
- --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b
- # via
- # gcp-releasetool
- # twine
-markdown-it-py==3.0.0 \
- --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
- --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
- # via rich
-markupsafe==2.1.5 \
- --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
- --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
- --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
- --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
- --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
- --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
- --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
- --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
- --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
- --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
- --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
- --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
- --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
- --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
- --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
- --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
- --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
- --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
- --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
- --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
- --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
- --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
- --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
- --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
- --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
- --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
- --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
- --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
- --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
- --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
- --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
- --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
- --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
- --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
- --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
- --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
- --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
- --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
- --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
- --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
- --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
- --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
- --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
- --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
- --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
- --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
- --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
- --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
- --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
- --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
- --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
- --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
- --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
- --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
- --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
- --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
- --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
- --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
- --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
- --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
- # via jinja2
-mdurl==0.1.2 \
- --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
- --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
- # via markdown-it-py
-more-itertools==10.3.0 \
- --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \
- --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320
- # via
- # jaraco-classes
- # jaraco-functools
-nh3==0.2.18 \
- --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \
- --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \
- --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \
- --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \
- --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \
- --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \
- --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \
- --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \
- --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \
- --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \
- --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \
- --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \
- --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \
- --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \
- --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \
- --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe
- # via readme-renderer
-nox==2024.4.15 \
- --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
- --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
- # via -r requirements.in
-packaging==24.1 \
- --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
- --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
- # via
- # gcp-releasetool
- # nox
-pkginfo==1.10.0 \
- --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \
- --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097
- # via twine
-platformdirs==4.2.2 \
- --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
- --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
- # via virtualenv
-proto-plus==1.24.0 \
- --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \
- --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12
- # via google-api-core
-protobuf==5.27.2 \
- --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \
- --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \
- --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \
- --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \
- --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \
- --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \
- --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \
- --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \
- --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \
- --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \
- --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714
- # via
- # gcp-docuploader
- # gcp-releasetool
- # google-api-core
- # googleapis-common-protos
- # proto-plus
-pyasn1==0.6.0 \
- --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \
- --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473
- # via
- # pyasn1-modules
- # rsa
-pyasn1-modules==0.4.0 \
- --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \
- --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b
- # via google-auth
-pycparser==2.22 \
- --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
- --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
- # via cffi
-pygments==2.18.0 \
- --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \
- --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a
- # via
- # readme-renderer
- # rich
-pyjwt==2.8.0 \
- --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
- --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
- # via gcp-releasetool
-pyperclip==1.9.0 \
- --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310
- # via gcp-releasetool
-python-dateutil==2.9.0.post0 \
- --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
- --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
- # via gcp-releasetool
-readme-renderer==44.0 \
- --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \
- --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1
- # via twine
-requests==2.32.3 \
- --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
- --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
- # via
- # gcp-releasetool
- # google-api-core
- # google-cloud-storage
- # requests-toolbelt
- # twine
-requests-toolbelt==1.0.0 \
- --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \
- --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06
- # via twine
-rfc3986==2.0.0 \
- --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
- --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
- # via twine
-rich==13.7.1 \
- --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \
- --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432
- # via twine
-rsa==4.9 \
- --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
- --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
- # via google-auth
-secretstorage==3.3.3 \
- --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
- --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
- # via keyring
-six==1.16.0 \
- --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
- --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
- # via
- # gcp-docuploader
- # python-dateutil
-tomli==2.0.1 \
- --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
- --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
- # via nox
-twine==5.1.1 \
- --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
- --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
- # via -r requirements.in
-typing-extensions==4.12.2 \
- --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
- --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
- # via -r requirements.in
-urllib3==2.2.2 \
- --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \
- --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168
- # via
- # requests
- # twine
-virtualenv==20.26.3 \
- --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
- --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
- # via nox
-wheel==0.43.0 \
- --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \
- --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81
- # via -r requirements.in
-zipp==3.19.2 \
- --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \
- --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==70.2.0 \
- --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \
- --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1
- # via -r requirements.in
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
new file mode 100644
index 00000000..e83c889e
--- /dev/null
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -0,0 +1,60 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.13"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-313"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-error-reporting/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-error-reporting/.kokoro/trampoline_v2.sh"
+
+
+#############################################
+# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
+
+env_vars: {
+ key: "PRODUCT_AREA_LABEL"
+ value: "observability"
+}
+env_vars: {
+ key: "PRODUCT_LABEL"
+ value: "error-reporting"
+}
+env_vars: {
+ key: "LANGUAGE_LABEL"
+ value: "python"
+}
+
+###################################################
+
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
new file mode 100644
index 00000000..0ab001ca
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-error-reporting/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 55910c8b..53e365bc 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Install nox
-python3.9 -m pip install --upgrade --quiet nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
# Use secrets acessor service account to get secrets
if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index c42d93fc..b3960c05 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "1.11.1"
+ ".": "1.12.0"
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d3385b60..a09f8129 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,22 @@
[1]: https://pypi.org/project/google-cloud-error-reporting/#history
+## [1.12.0](https://github.com/googleapis/python-error-reporting/compare/v1.11.1...v1.12.0) (2025-05-21)
+
+
+### Features
+
+* Add REST Interceptors which support reading metadata ([1d120c7](https://github.com/googleapis/python-error-reporting/commit/1d120c77c73b566796b32cab017a0d4cdfa28713))
+* Add support for opt-in debug logging ([1d120c7](https://github.com/googleapis/python-error-reporting/commit/1d120c77c73b566796b32cab017a0d4cdfa28713))
+
+
+### Bug Fixes
+
+* Allow Protobuf 6.x ([#557](https://github.com/googleapis/python-error-reporting/issues/557)) ([9f8faeb](https://github.com/googleapis/python-error-reporting/commit/9f8faeba223a0e1834c0750d21b5cafdee74d327))
+* Fix typing issue with gRPC metadata when key ends in -bin ([1d120c7](https://github.com/googleapis/python-error-reporting/commit/1d120c77c73b566796b32cab017a0d4cdfa28713))
+* Remove setup.cfg configuration for creating universal wheels ([#562](https://github.com/googleapis/python-error-reporting/issues/562)) ([0738c03](https://github.com/googleapis/python-error-reporting/commit/0738c03fa4321fd29c0915da2336bf77947367ae))
+* Require proto-plus >= 1.25.0 for Python 3.13 ([#567](https://github.com/googleapis/python-error-reporting/issues/567)) ([d5cd225](https://github.com/googleapis/python-error-reporting/commit/d5cd225fd71d8b54197c4f02c30d32eb8cd24dfa))
+
## [1.11.1](https://github.com/googleapis/python-error-reporting/compare/v1.11.0...v1.11.1) (2024-09-17)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 5fc13e52..5307292a 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.12 -- -k
+ $ nox -s unit-3.13 -- -k
.. note::
@@ -227,6 +227,7 @@ We support:
- `Python 3.10`_
- `Python 3.11`_
- `Python 3.12`_
+- `Python 3.13`_
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
@@ -234,6 +235,7 @@ We support:
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
.. _Python 3.12: https://docs.python.org/3.12/
+.. _Python 3.13: https://docs.python.org/3.13/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/README.rst b/README.rst
index 445767c5..a1b9e25e 100644
--- a/README.rst
+++ b/README.rst
@@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps:
1. `Select or create a Cloud Platform project.`_
2. `Enable billing for your project.`_
3. `Enable the Error Reporting API.`_
-4. `Setup Authentication.`_
+4. `Set up Authentication.`_
.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
.. _Enable the Error Reporting API.: https://cloud.google.com/error-reporting
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
Installation
~~~~~~~~~~~~
@@ -106,3 +106,92 @@ Next Steps
.. _Error Reporting API Product documentation: https://cloud.google.com/error-reporting
.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+Environment-Based Examples
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+ export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+ export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+Code-Based Examples
+^^^^^^^^^^^^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+ import logging
+
+ from google.cloud import library_v1
+
+ base_logger = logging.getLogger("google")
+ base_logger.addHandler(logging.StreamHandler())
+ base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+ import logging
+
+ from google.cloud import library_v1
+
+ base_logger = logging.getLogger("google.cloud.library_v1")
+ base_logger.addHandler(logging.StreamHandler())
+ base_logger.setLevel(logging.DEBUG)
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+ logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+ :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+ one library, but decide you need to also set up environment-based logging configuration for another library.
+
+ #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+   if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+ executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+ (This is the reason for 2.i. above.)
diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py
index a034d27a..7138f214 100644
--- a/google/cloud/error_reporting/gapic_version.py
+++ b/google/cloud/error_reporting/gapic_version.py
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "1.11.1" # {x-release-please-version}
+__version__ = "1.12.0" # {x-release-please-version}
diff --git a/google/cloud/errorreporting_v1beta1/__init__.py b/google/cloud/errorreporting_v1beta1/__init__.py
index f2c069f9..e6ff221d 100644
--- a/google/cloud/errorreporting_v1beta1/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py
index a034d27a..7138f214 100644
--- a/google/cloud/errorreporting_v1beta1/gapic_version.py
+++ b/google/cloud/errorreporting_v1beta1/gapic_version.py
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "1.11.1" # {x-release-please-version}
+__version__ = "1.12.0" # {x-release-please-version}
diff --git a/google/cloud/errorreporting_v1beta1/services/__init__.py b/google/cloud/errorreporting_v1beta1/services/__init__.py
index 8f6cf068..cbf94b28 100644
--- a/google/cloud/errorreporting_v1beta1/services/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py
index 7adebfc3..1e8d42ee 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py
index 21361c25..3e9cf737 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging as std_logging
from collections import OrderedDict
import re
from typing import (
@@ -36,6 +37,7 @@
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
@@ -49,6 +51,15 @@
from .transports.grpc_asyncio import ErrorGroupServiceGrpcAsyncIOTransport
from .client import ErrorGroupServiceClient
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
class ErrorGroupServiceAsyncClient:
"""Service for retrieving and updating individual error groups."""
@@ -260,6 +271,28 @@ def __init__(
client_info=client_info,
)
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceAsyncClient`.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "universeDomain": getattr(
+ self._client._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._client._transport, "_credentials")
+ else {
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "credentialsType": None,
+ },
+ )
+
async def get_group(
self,
request: Optional[Union[error_group_service.GetGroupRequest, dict]] = None,
@@ -267,7 +300,7 @@ async def get_group(
group_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> common.ErrorGroup:
r"""Get the specified group.
@@ -329,8 +362,10 @@ async def sample_get_group():
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.ErrorGroup:
@@ -341,7 +376,10 @@ async def sample_get_group():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([group_name])
+ flattened_params = [group_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -393,7 +431,7 @@ async def update_group(
group: Optional[common.ErrorGroup] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> common.ErrorGroup:
r"""Replace the data for the specified group.
Fails if the group does not exist.
@@ -437,8 +475,10 @@ async def sample_update_group():
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.ErrorGroup:
@@ -449,7 +489,10 @@ async def sample_update_group():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([group])
+ flattened_params = [group]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -505,5 +548,8 @@ async def __aexit__(self, exc_type, exc, tb):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
__all__ = ("ErrorGroupServiceAsyncClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py
index 97cb9157..cc4b15df 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,6 +14,9 @@
# limitations under the License.
#
from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
import os
import re
from typing import (
@@ -42,12 +45,22 @@
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import error_group_service
from .transports.base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO
@@ -455,52 +468,45 @@ def _get_universe_domain(
raise ValueError("Universe Domain cannot be an empty string.")
return universe_domain
- @staticmethod
- def _compare_universes(
- client_universe: str, credentials: ga_credentials.Credentials
- ) -> bool:
- """Returns True iff the universe domains used by the client and credentials match.
-
- Args:
- client_universe (str): The universe domain configured via the client options.
- credentials (ga_credentials.Credentials): The credentials being used in the client.
+ def _validate_universe_domain(self):
+ """Validates client's and credentials' universe domains are consistent.
Returns:
- bool: True iff client_universe matches the universe in credentials.
+ bool: True iff the configured universe domain is valid.
Raises:
- ValueError: when client_universe does not match the universe in credentials.
+ ValueError: If the configured universe domain is not valid.
"""
- default_universe = ErrorGroupServiceClient._DEFAULT_UNIVERSE
- credentials_universe = getattr(credentials, "universe_domain", default_universe)
-
- if client_universe != credentials_universe:
- raise ValueError(
- "The configured universe domain "
- f"({client_universe}) does not match the universe domain "
- f"found in the credentials ({credentials_universe}). "
- "If you haven't configured the universe domain explicitly, "
- f"`{default_universe}` is the default."
- )
+ # NOTE (b/349488459): universe validation is disabled until further notice.
return True
- def _validate_universe_domain(self):
- """Validates client's and credentials' universe domains are consistent.
-
- Returns:
- bool: True iff the configured universe domain is valid.
+ def _add_cred_info_for_auth_errors(
+ self, error: core_exceptions.GoogleAPICallError
+ ) -> None:
+ """Adds credential info string to error details for 401/403/404 errors.
- Raises:
- ValueError: If the configured universe domain is not valid.
+ Args:
+ error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
"""
- self._is_universe_domain_valid = (
- self._is_universe_domain_valid
- or ErrorGroupServiceClient._compare_universes(
- self.universe_domain, self.transport._credentials
- )
- )
- return self._is_universe_domain_valid
+ if error.code not in [
+ HTTPStatus.UNAUTHORIZED,
+ HTTPStatus.FORBIDDEN,
+ HTTPStatus.NOT_FOUND,
+ ]:
+ return
+
+ cred = self._transport._credentials
+
+ # get_cred_info is only available in google-auth>=2.35.0
+ if not hasattr(cred, "get_cred_info"):
+ return
+
+ # ignore the type check since pypy test fails when get_cred_info
+ # is not available
+ cred_info = cred.get_cred_info() # type: ignore
+ if cred_info and hasattr(error._details, "append"):
+ error._details.append(json.dumps(cred_info))
@property
def api_endpoint(self):
@@ -610,6 +616,10 @@ def __init__(
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
@@ -676,6 +686,29 @@ def __init__(
api_audience=self._client_options.api_audience,
)
+ if "async" not in str(self._transport):
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient`.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "universeDomain": getattr(
+ self._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._transport, "_credentials")
+ else {
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "credentialsType": None,
+ },
+ )
+
def get_group(
self,
request: Optional[Union[error_group_service.GetGroupRequest, dict]] = None,
@@ -683,7 +716,7 @@ def get_group(
group_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> common.ErrorGroup:
r"""Get the specified group.
@@ -745,8 +778,10 @@ def sample_get_group():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.ErrorGroup:
@@ -757,7 +792,10 @@ def sample_get_group():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([group_name])
+ flattened_params = [group_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -806,7 +844,7 @@ def update_group(
group: Optional[common.ErrorGroup] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> common.ErrorGroup:
r"""Replace the data for the specified group.
Fails if the group does not exist.
@@ -850,8 +888,10 @@ def sample_update_group():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.ErrorGroup:
@@ -862,7 +902,10 @@ def sample_update_group():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([group])
+ flattened_params = [group]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -922,5 +965,7 @@ def __exit__(self, type, value, traceback):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
__all__ = ("ErrorGroupServiceClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst
new file mode 100644
index 00000000..a0b01808
--- /dev/null
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`ErrorGroupServiceTransport` is the ABC for all transports.
+- public child `ErrorGroupServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `ErrorGroupServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseErrorGroupServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `ErrorGroupServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py
index ee3446ba..f02ba22b 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py
index 52b03cea..9938eaf4 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import error_group_service
@@ -33,6 +34,9 @@
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ErrorGroupServiceTransport(abc.ABC):
"""Abstract transport class for ErrorGroupService."""
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py
index 793b005b..8bfab8f0 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -21,13 +24,90 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import error_group_service
from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+            # Convert gRPC metadata to a list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ErrorGroupServiceGrpcTransport(ErrorGroupServiceTransport):
"""gRPC backend transport for ErrorGroupService.
@@ -181,7 +261,12 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientInterceptor()
+ self._logged_channel = grpc.intercept_channel(
+ self._grpc_channel, self._interceptor
+ )
+
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
@@ -255,7 +340,7 @@ def get_group(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_group" not in self._stubs:
- self._stubs["get_group"] = self.grpc_channel.unary_unary(
+ self._stubs["get_group"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup",
request_serializer=error_group_service.GetGroupRequest.serialize,
response_deserializer=common.ErrorGroup.deserialize,
@@ -282,7 +367,7 @@ def update_group(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_group" not in self._stubs:
- self._stubs["update_group"] = self.grpc_channel.unary_unary(
+ self._stubs["update_group"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup",
request_serializer=error_group_service.UpdateGroupRequest.serialize,
response_deserializer=common.ErrorGroup.deserialize,
@@ -290,7 +375,7 @@ def update_group(
return self._stubs["update_group"]
def close(self):
- self.grpc_channel.close()
+ self._logged_channel.close()
@property
def kind(self) -> str:
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py
index 87365094..f3186e61 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
+import json
+import pickle
+import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
@@ -22,8 +26,11 @@
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.errorreporting_v1beta1.types import common
@@ -31,6 +38,82 @@
from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ErrorGroupServiceGrpcTransport
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+ grpc.aio.UnaryUnaryClientInterceptor
+): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata to a list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ErrorGroupServiceGrpcAsyncIOTransport(ErrorGroupServiceTransport):
"""gRPC AsyncIO backend transport for ErrorGroupService.
@@ -227,7 +310,13 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@property
@@ -259,7 +348,7 @@ def get_group(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_group" not in self._stubs:
- self._stubs["get_group"] = self.grpc_channel.unary_unary(
+ self._stubs["get_group"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup",
request_serializer=error_group_service.GetGroupRequest.serialize,
response_deserializer=common.ErrorGroup.deserialize,
@@ -288,7 +377,7 @@ def update_group(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_group" not in self._stubs:
- self._stubs["update_group"] = self.grpc_channel.unary_unary(
+ self._stubs["update_group"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup",
request_serializer=error_group_service.UpdateGroupRequest.serialize,
response_deserializer=common.ErrorGroup.deserialize,
@@ -298,20 +387,29 @@ def update_group(
def _prep_wrapped_messages(self, client_info):
"""Precompute the wrapped methods, overriding the base class method to use async wrappers."""
self._wrapped_methods = {
- self.get_group: gapic_v1.method_async.wrap_method(
+ self.get_group: self._wrap_method(
self.get_group,
default_timeout=None,
client_info=client_info,
),
- self.update_group: gapic_v1.method_async.wrap_method(
+ self.update_group: self._wrap_method(
self.update_group,
default_timeout=None,
client_info=client_info,
),
}
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
- return self.grpc_channel.close()
+ return self._logged_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
__all__ = ("ErrorGroupServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py
index 900d875a..526f1d7f 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,47 +13,56 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging
+import json # type: ignore
from google.auth.transport.requests import AuthorizedSession # type: ignore
-import json # type: ignore
-import grpc # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
-from google.api_core import path_template
from google.api_core import gapic_v1
+import google.protobuf
from google.protobuf import json_format
+
from requests import __version__ as requests_version
import dataclasses
-import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings
+
+from google.cloud.errorreporting_v1beta1.types import common
+from google.cloud.errorreporting_v1beta1.types import error_group_service
+
+
+from .rest_base import _BaseErrorGroupServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+try:
+ from google.api_core import client_logging # type: ignore
-from google.cloud.errorreporting_v1beta1.types import common
-from google.cloud.errorreporting_v1beta1.types import error_group_service
-
-from .base import (
- ErrorGroupServiceTransport,
- DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO,
-)
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+_LOGGER = logging.getLogger(__name__)
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
grpc_version=None,
- rest_version=requests_version,
+ rest_version=f"requests@{requests_version}",
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ErrorGroupServiceRestInterceptor:
"""Interceptor for ErrorGroupService.
@@ -95,8 +104,10 @@ def post_update_group(self, response):
def pre_get_group(
self,
request: error_group_service.GetGroupRequest,
- metadata: Sequence[Tuple[str, str]],
- ) -> Tuple[error_group_service.GetGroupRequest, Sequence[Tuple[str, str]]]:
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_group_service.GetGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]
+ ]:
"""Pre-rpc interceptor for get_group
Override in a subclass to manipulate the request or metadata
@@ -107,17 +118,42 @@ def pre_get_group(
def post_get_group(self, response: common.ErrorGroup) -> common.ErrorGroup:
"""Post-rpc interceptor for get_group
- Override in a subclass to manipulate the response
+ DEPRECATED. Please use the `post_get_group_with_metadata`
+ interceptor instead.
+
+ Override in a subclass to read or manipulate the response
after it is returned by the ErrorGroupService server but before
- it is returned to user code.
+ it is returned to user code. This `post_get_group` interceptor runs
+ before the `post_get_group_with_metadata` interceptor.
"""
return response
+ def post_get_group_with_metadata(
+ self,
+ response: common.ErrorGroup,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[common.ErrorGroup, Sequence[Tuple[str, Union[str, bytes]]]]:
+ """Post-rpc interceptor for get_group
+
+ Override in a subclass to read or manipulate the response or metadata after it
+ is returned by the ErrorGroupService server but before it is returned to user code.
+
+ We recommend only using this `post_get_group_with_metadata`
+ interceptor in new development instead of the `post_get_group` interceptor.
+ When both interceptors are used, this `post_get_group_with_metadata` interceptor runs after the
+ `post_get_group` interceptor. The (possibly modified) response returned by
+ `post_get_group` will be passed to
+ `post_get_group_with_metadata`.
+ """
+ return response, metadata
+
def pre_update_group(
self,
request: error_group_service.UpdateGroupRequest,
- metadata: Sequence[Tuple[str, str]],
- ) -> Tuple[error_group_service.UpdateGroupRequest, Sequence[Tuple[str, str]]]:
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_group_service.UpdateGroupRequest, Sequence[Tuple[str, Union[str, bytes]]]
+ ]:
"""Pre-rpc interceptor for update_group
Override in a subclass to manipulate the request or metadata
@@ -128,12 +164,35 @@ def pre_update_group(
def post_update_group(self, response: common.ErrorGroup) -> common.ErrorGroup:
"""Post-rpc interceptor for update_group
- Override in a subclass to manipulate the response
+ DEPRECATED. Please use the `post_update_group_with_metadata`
+ interceptor instead.
+
+ Override in a subclass to read or manipulate the response
after it is returned by the ErrorGroupService server but before
- it is returned to user code.
+ it is returned to user code. This `post_update_group` interceptor runs
+ before the `post_update_group_with_metadata` interceptor.
"""
return response
+ def post_update_group_with_metadata(
+ self,
+ response: common.ErrorGroup,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[common.ErrorGroup, Sequence[Tuple[str, Union[str, bytes]]]]:
+ """Post-rpc interceptor for update_group
+
+ Override in a subclass to read or manipulate the response or metadata after it
+ is returned by the ErrorGroupService server but before it is returned to user code.
+
+ We recommend only using this `post_update_group_with_metadata`
+ interceptor in new development instead of the `post_update_group` interceptor.
+ When both interceptors are used, this `post_update_group_with_metadata` interceptor runs after the
+ `post_update_group` interceptor. The (possibly modified) response returned by
+ `post_update_group` will be passed to
+ `post_update_group_with_metadata`.
+ """
+ return response, metadata
+
@dataclasses.dataclass
class ErrorGroupServiceRestStub:
@@ -142,8 +201,8 @@ class ErrorGroupServiceRestStub:
_interceptor: ErrorGroupServiceRestInterceptor
-class ErrorGroupServiceRestTransport(ErrorGroupServiceTransport):
- """REST backend transport for ErrorGroupService.
+class ErrorGroupServiceRestTransport(_BaseErrorGroupServiceRestTransport):
+ """REST backend synchronous transport for ErrorGroupService.
Service for retrieving and updating individual error groups.
@@ -152,7 +211,6 @@ class ErrorGroupServiceRestTransport(ErrorGroupServiceTransport):
and call it.
It sends JSON representations of protocol buffers over HTTP/1.1
-
"""
def __init__(
@@ -206,21 +264,12 @@ def __init__(
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
# credentials object
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
+ url_scheme=url_scheme,
api_audience=api_audience,
)
self._session = AuthorizedSession(
@@ -231,19 +280,33 @@ def __init__(
self._interceptor = interceptor or ErrorGroupServiceRestInterceptor()
self._prep_wrapped_messages(client_info)
- class _GetGroup(ErrorGroupServiceRestStub):
+ class _GetGroup(
+ _BaseErrorGroupServiceRestTransport._BaseGetGroup, ErrorGroupServiceRestStub
+ ):
def __hash__(self):
- return hash("GetGroup")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("ErrorGroupServiceRestTransport.GetGroup")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -251,7 +314,7 @@ def __call__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> common.ErrorGroup:
r"""Call the get group method over HTTP.
@@ -262,8 +325,10 @@ def __call__(
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.common.ErrorGroup:
@@ -272,42 +337,55 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1beta1/{group_name=projects/*/groups/*}",
- },
- {
- "method": "get",
- "uri": "/v1beta1/{group_name=projects/*/locations/*/groups/*}",
- },
- ]
- request, metadata = self._interceptor.pre_get_group(request, metadata)
- pb_request = error_group_service.GetGroupRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
+ http_options = (
+ _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_http_options()
+ )
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ request, metadata = self._interceptor.pre_get_group(request, metadata)
+ transcoded_request = _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
- query_params["$alt"] = "json;enum-encoding=int"
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.GetGroup",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": "GetGroup",
+ "httpRequest": http_request,
+ "metadata": http_request["headers"],
+ },
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = ErrorGroupServiceRestTransport._GetGroup._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -320,22 +398,63 @@ def __call__(
pb_resp = common.ErrorGroup.pb(resp)
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
resp = self._interceptor.post_get_group(resp)
+ response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+ resp, _ = self._interceptor.post_get_group_with_metadata(
+ resp, response_metadata
+ )
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ try:
+ response_payload = common.ErrorGroup.to_json(response)
+ except:
+ response_payload = None
+ http_response = {
+ "payload": response_payload,
+ "headers": dict(response.headers),
+ "status": response.status_code,
+ }
+ _LOGGER.debug(
+ "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.get_group",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": "GetGroup",
+ "metadata": http_response["headers"],
+ "httpResponse": http_response,
+ },
+ )
return resp
- class _UpdateGroup(ErrorGroupServiceRestStub):
+ class _UpdateGroup(
+ _BaseErrorGroupServiceRestTransport._BaseUpdateGroup, ErrorGroupServiceRestStub
+ ):
def __hash__(self):
- return hash("UpdateGroup")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("ErrorGroupServiceRestTransport.UpdateGroup")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -343,7 +462,7 @@ def __call__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> common.ErrorGroup:
r"""Call the update group method over HTTP.
@@ -354,8 +473,10 @@ def __call__(
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.common.ErrorGroup:
@@ -364,50 +485,60 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "put",
- "uri": "/v1beta1/{group.name=projects/*/groups/*}",
- "body": "group",
- },
- {
- "method": "put",
- "uri": "/v1beta1/{group.name=projects/*/locations/*/groups/*}",
- "body": "group",
- },
- ]
- request, metadata = self._interceptor.pre_update_group(request, metadata)
- pb_request = error_group_service.UpdateGroupRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
+ http_options = (
+ _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_http_options()
+ )
- # Jsonify the request body
+ request, metadata = self._interceptor.pre_update_group(request, metadata)
+ transcoded_request = _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
- query_params["$alt"] = "json;enum-encoding=int"
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.UpdateGroup",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": "UpdateGroup",
+ "httpRequest": http_request,
+ "metadata": http_request["headers"],
+ },
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = ErrorGroupServiceRestTransport._UpdateGroup._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -420,7 +551,33 @@ def __call__(
pb_resp = common.ErrorGroup.pb(resp)
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
resp = self._interceptor.post_update_group(resp)
+ response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+ resp, _ = self._interceptor.post_update_group_with_metadata(
+ resp, response_metadata
+ )
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ try:
+ response_payload = common.ErrorGroup.to_json(response)
+ except:
+ response_payload = None
+ http_response = {
+ "payload": response_payload,
+ "headers": dict(response.headers),
+ "status": response.status_code,
+ }
+ _LOGGER.debug(
+ "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.update_group",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService",
+ "rpcName": "UpdateGroup",
+ "metadata": http_response["headers"],
+ "httpResponse": http_response,
+ },
+ )
return resp
@property
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py
new file mode 100644
index 00000000..f4330f97
--- /dev/null
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py
@@ -0,0 +1,207 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.errorreporting_v1beta1.types import common
+from google.cloud.errorreporting_v1beta1.types import error_group_service
+
+
+class _BaseErrorGroupServiceRestTransport(ErrorGroupServiceTransport):
+ """Base REST backend transport for ErrorGroupService.
+
+ Note: This class is not meant to be used directly. Use its sync and
+ async sub-classes instead.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "clouderrorreporting.googleapis.com",
+ credentials: Optional[Any] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'clouderrorreporting.googleapis.com').
+ credentials (Optional[Any]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ class _BaseGetGroup:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1beta1/{group_name=projects/*/groups/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1beta1/{group_name=projects/*/locations/*/groups/*}",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = error_group_service.GetGroupRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseUpdateGroup:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "put",
+ "uri": "/v1beta1/{group.name=projects/*/groups/*}",
+ "body": "group",
+ },
+ {
+ "method": "put",
+ "uri": "/v1beta1/{group.name=projects/*/locations/*/groups/*}",
+ "body": "group",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = error_group_service.UpdateGroupRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+
+__all__ = ("_BaseErrorGroupServiceRestTransport",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py
index a31a509a..e04db91b 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py
index 232049f7..f588025e 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging as std_logging
from collections import OrderedDict
import re
from typing import (
@@ -36,6 +37,7 @@
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
@@ -50,6 +52,15 @@
from .transports.grpc_asyncio import ErrorStatsServiceGrpcAsyncIOTransport
from .client import ErrorStatsServiceClient
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
class ErrorStatsServiceAsyncClient:
"""An API for retrieving and managing error statistics as well
@@ -263,6 +274,28 @@ def __init__(
client_info=client_info,
)
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceAsyncClient`.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "universeDomain": getattr(
+ self._client._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._client._transport, "_credentials")
+ else {
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "credentialsType": None,
+ },
+ )
+
async def list_group_stats(
self,
request: Optional[
@@ -273,7 +306,7 @@ async def list_group_stats(
time_range: Optional[error_stats_service.QueryTimeRange] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListGroupStatsAsyncPager:
r"""Lists the specified groups.
@@ -354,8 +387,10 @@ async def sample_list_group_stats():
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsAsyncPager:
@@ -369,7 +404,10 @@ async def sample_list_group_stats():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name, time_range])
+ flattened_params = [project_name, time_range]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -435,7 +473,7 @@ async def list_events(
group_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListEventsAsyncPager:
r"""Lists the specified events.
@@ -504,8 +542,10 @@ async def sample_list_events():
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsAsyncPager:
@@ -519,7 +559,10 @@ async def sample_list_events():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name, group_id])
+ flattened_params = [project_name, group_id]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -584,7 +627,7 @@ async def delete_events(
project_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> error_stats_service.DeleteEventsResponse:
r"""Deletes all error events of a given project.
@@ -638,8 +681,10 @@ async def sample_delete_events():
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse:
@@ -650,7 +695,10 @@ async def sample_delete_events():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name])
+ flattened_params = [project_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -706,5 +754,8 @@ async def __aexit__(self, exc_type, exc, tb):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
__all__ = ("ErrorStatsServiceAsyncClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py
index 2f128d1a..dd22c33b 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,6 +14,9 @@
# limitations under the License.
#
from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
import os
import re
from typing import (
@@ -42,12 +45,22 @@
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
from google.cloud.errorreporting_v1beta1.services.error_stats_service import pagers
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import error_stats_service
@@ -458,52 +471,45 @@ def _get_universe_domain(
raise ValueError("Universe Domain cannot be an empty string.")
return universe_domain
- @staticmethod
- def _compare_universes(
- client_universe: str, credentials: ga_credentials.Credentials
- ) -> bool:
- """Returns True iff the universe domains used by the client and credentials match.
-
- Args:
- client_universe (str): The universe domain configured via the client options.
- credentials (ga_credentials.Credentials): The credentials being used in the client.
+ def _validate_universe_domain(self):
+ """Validates client's and credentials' universe domains are consistent.
Returns:
- bool: True iff client_universe matches the universe in credentials.
+ bool: True iff the configured universe domain is valid.
Raises:
- ValueError: when client_universe does not match the universe in credentials.
+ ValueError: If the configured universe domain is not valid.
"""
- default_universe = ErrorStatsServiceClient._DEFAULT_UNIVERSE
- credentials_universe = getattr(credentials, "universe_domain", default_universe)
-
- if client_universe != credentials_universe:
- raise ValueError(
- "The configured universe domain "
- f"({client_universe}) does not match the universe domain "
- f"found in the credentials ({credentials_universe}). "
- "If you haven't configured the universe domain explicitly, "
- f"`{default_universe}` is the default."
- )
+ # NOTE (b/349488459): universe validation is disabled until further notice.
return True
- def _validate_universe_domain(self):
- """Validates client's and credentials' universe domains are consistent.
-
- Returns:
- bool: True iff the configured universe domain is valid.
+ def _add_cred_info_for_auth_errors(
+ self, error: core_exceptions.GoogleAPICallError
+ ) -> None:
+ """Adds credential info string to error details for 401/403/404 errors.
- Raises:
- ValueError: If the configured universe domain is not valid.
+ Args:
+ error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
"""
- self._is_universe_domain_valid = (
- self._is_universe_domain_valid
- or ErrorStatsServiceClient._compare_universes(
- self.universe_domain, self.transport._credentials
- )
- )
- return self._is_universe_domain_valid
+ if error.code not in [
+ HTTPStatus.UNAUTHORIZED,
+ HTTPStatus.FORBIDDEN,
+ HTTPStatus.NOT_FOUND,
+ ]:
+ return
+
+ cred = self._transport._credentials
+
+ # get_cred_info is only available in google-auth>=2.35.0
+ if not hasattr(cred, "get_cred_info"):
+ return
+
+ # ignore the type check since pypy test fails when get_cred_info
+ # is not available
+ cred_info = cred.get_cred_info() # type: ignore
+ if cred_info and hasattr(error._details, "append"):
+ error._details.append(json.dumps(cred_info))
@property
def api_endpoint(self):
@@ -613,6 +619,10 @@ def __init__(
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
@@ -679,6 +689,29 @@ def __init__(
api_audience=self._client_options.api_audience,
)
+ if "async" not in str(self._transport):
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient`.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "universeDomain": getattr(
+ self._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._transport, "_credentials")
+ else {
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "credentialsType": None,
+ },
+ )
+
def list_group_stats(
self,
request: Optional[
@@ -689,7 +722,7 @@ def list_group_stats(
time_range: Optional[error_stats_service.QueryTimeRange] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListGroupStatsPager:
r"""Lists the specified groups.
@@ -770,8 +803,10 @@ def sample_list_group_stats():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsPager:
@@ -785,7 +820,10 @@ def sample_list_group_stats():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name, time_range])
+ flattened_params = [project_name, time_range]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -848,7 +886,7 @@ def list_events(
group_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListEventsPager:
r"""Lists the specified events.
@@ -917,8 +955,10 @@ def sample_list_events():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsPager:
@@ -932,7 +972,10 @@ def sample_list_events():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name, group_id])
+ flattened_params = [project_name, group_id]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -994,7 +1037,7 @@ def delete_events(
project_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> error_stats_service.DeleteEventsResponse:
r"""Deletes all error events of a given project.
@@ -1048,8 +1091,10 @@ def sample_delete_events():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse:
@@ -1060,7 +1105,10 @@ def sample_delete_events():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name])
+ flattened_params = [project_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -1120,5 +1168,7 @@ def __exit__(self, type, value, traceback):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
__all__ = ("ErrorStatsServiceClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py
index 274f7ac2..190fecd9 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -67,7 +67,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -81,8 +81,10 @@ def __init__(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = error_stats_service.ListGroupStatsRequest(request)
@@ -141,7 +143,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -155,8 +157,10 @@ def __init__(
retry (google.api_core.retry.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = error_stats_service.ListGroupStatsRequest(request)
@@ -219,7 +223,7 @@ def __init__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -233,8 +237,10 @@ def __init__(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = error_stats_service.ListEventsRequest(request)
@@ -293,7 +299,7 @@ def __init__(
*,
retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = ()
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -307,8 +313,10 @@ def __init__(
retry (google.api_core.retry.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = error_stats_service.ListEventsRequest(request)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst
new file mode 100644
index 00000000..9fb4cf06
--- /dev/null
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`ErrorStatsServiceTransport` is the ABC for all transports.
+- public child `ErrorStatsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `ErrorStatsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseErrorStatsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `ErrorStatsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py
index 69c4f43c..8f452e45 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py
index 2ca8391c..12f6992a 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
from google.cloud.errorreporting_v1beta1.types import error_stats_service
@@ -32,6 +33,9 @@
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ErrorStatsServiceTransport(abc.ABC):
"""Abstract transport class for ErrorStatsService."""
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py
index b1f71dd3..9bcb9106 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -21,12 +24,89 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.errorreporting_v1beta1.types import error_stats_service
from .base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+ # Convert gRPC metadata `` to list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ErrorStatsServiceGrpcTransport(ErrorStatsServiceTransport):
"""gRPC backend transport for ErrorStatsService.
@@ -181,7 +261,12 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientInterceptor()
+ self._logged_channel = grpc.intercept_channel(
+ self._grpc_channel, self._interceptor
+ )
+
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
@@ -258,7 +343,7 @@ def list_group_stats(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_group_stats" not in self._stubs:
- self._stubs["list_group_stats"] = self.grpc_channel.unary_unary(
+ self._stubs["list_group_stats"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats",
request_serializer=error_stats_service.ListGroupStatsRequest.serialize,
response_deserializer=error_stats_service.ListGroupStatsResponse.deserialize,
@@ -286,7 +371,7 @@ def list_events(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_events" not in self._stubs:
- self._stubs["list_events"] = self.grpc_channel.unary_unary(
+ self._stubs["list_events"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents",
request_serializer=error_stats_service.ListEventsRequest.serialize,
response_deserializer=error_stats_service.ListEventsResponse.deserialize,
@@ -315,7 +400,7 @@ def delete_events(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_events" not in self._stubs:
- self._stubs["delete_events"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_events"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents",
request_serializer=error_stats_service.DeleteEventsRequest.serialize,
response_deserializer=error_stats_service.DeleteEventsResponse.deserialize,
@@ -323,7 +408,7 @@ def delete_events(
return self._stubs["delete_events"]
def close(self):
- self.grpc_channel.close()
+ self._logged_channel.close()
@property
def kind(self) -> str:
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py
index 895a1295..a6211645 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
+import json
+import pickle
+import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
@@ -22,14 +26,93 @@
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.errorreporting_v1beta1.types import error_stats_service
from .base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ErrorStatsServiceGrpcTransport
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+ grpc.aio.UnaryUnaryClientInterceptor
+): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert gRPC metadata `` to list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ErrorStatsServiceGrpcAsyncIOTransport(ErrorStatsServiceTransport):
"""gRPC AsyncIO backend transport for ErrorStatsService.
@@ -227,7 +310,13 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@property
@@ -262,7 +351,7 @@ def list_group_stats(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_group_stats" not in self._stubs:
- self._stubs["list_group_stats"] = self.grpc_channel.unary_unary(
+ self._stubs["list_group_stats"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats",
request_serializer=error_stats_service.ListGroupStatsRequest.serialize,
response_deserializer=error_stats_service.ListGroupStatsResponse.deserialize,
@@ -291,7 +380,7 @@ def list_events(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_events" not in self._stubs:
- self._stubs["list_events"] = self.grpc_channel.unary_unary(
+ self._stubs["list_events"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents",
request_serializer=error_stats_service.ListEventsRequest.serialize,
response_deserializer=error_stats_service.ListEventsResponse.deserialize,
@@ -320,7 +409,7 @@ def delete_events(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_events" not in self._stubs:
- self._stubs["delete_events"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_events"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents",
request_serializer=error_stats_service.DeleteEventsRequest.serialize,
response_deserializer=error_stats_service.DeleteEventsResponse.deserialize,
@@ -330,25 +419,34 @@ def delete_events(
def _prep_wrapped_messages(self, client_info):
"""Precompute the wrapped methods, overriding the base class method to use async wrappers."""
self._wrapped_methods = {
- self.list_group_stats: gapic_v1.method_async.wrap_method(
+ self.list_group_stats: self._wrap_method(
self.list_group_stats,
default_timeout=None,
client_info=client_info,
),
- self.list_events: gapic_v1.method_async.wrap_method(
+ self.list_events: self._wrap_method(
self.list_events,
default_timeout=None,
client_info=client_info,
),
- self.delete_events: gapic_v1.method_async.wrap_method(
+ self.delete_events: self._wrap_method(
self.delete_events,
default_timeout=None,
client_info=client_info,
),
}
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
- return self.grpc_channel.close()
+ return self._logged_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
__all__ = ("ErrorStatsServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py
index f7cbbf8f..1b7a4cc5 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,46 +13,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging
+import json # type: ignore
from google.auth.transport.requests import AuthorizedSession # type: ignore
-import json # type: ignore
-import grpc # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
-from google.api_core import path_template
from google.api_core import gapic_v1
+import google.protobuf
from google.protobuf import json_format
+
from requests import __version__ as requests_version
import dataclasses
-import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings
+
+from google.cloud.errorreporting_v1beta1.types import error_stats_service
+
+
+from .rest_base import _BaseErrorStatsServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+try:
+ from google.api_core import client_logging # type: ignore
-from google.cloud.errorreporting_v1beta1.types import error_stats_service
-
-from .base import (
- ErrorStatsServiceTransport,
- DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO,
-)
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+_LOGGER = logging.getLogger(__name__)
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
grpc_version=None,
- rest_version=requests_version,
+ rest_version=f"requests@{requests_version}",
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ErrorStatsServiceRestInterceptor:
"""Interceptor for ErrorStatsService.
@@ -102,8 +111,10 @@ def post_list_group_stats(self, response):
def pre_delete_events(
self,
request: error_stats_service.DeleteEventsRequest,
- metadata: Sequence[Tuple[str, str]],
- ) -> Tuple[error_stats_service.DeleteEventsRequest, Sequence[Tuple[str, str]]]:
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_stats_service.DeleteEventsRequest, Sequence[Tuple[str, Union[str, bytes]]]
+ ]:
"""Pre-rpc interceptor for delete_events
Override in a subclass to manipulate the request or metadata
@@ -116,17 +127,45 @@ def post_delete_events(
) -> error_stats_service.DeleteEventsResponse:
"""Post-rpc interceptor for delete_events
- Override in a subclass to manipulate the response
+ DEPRECATED. Please use the `post_delete_events_with_metadata`
+ interceptor instead.
+
+ Override in a subclass to read or manipulate the response
after it is returned by the ErrorStatsService server but before
- it is returned to user code.
+ it is returned to user code. This `post_delete_events` interceptor runs
+ before the `post_delete_events_with_metadata` interceptor.
"""
return response
+ def post_delete_events_with_metadata(
+ self,
+ response: error_stats_service.DeleteEventsResponse,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_stats_service.DeleteEventsResponse,
+ Sequence[Tuple[str, Union[str, bytes]]],
+ ]:
+ """Post-rpc interceptor for delete_events
+
+ Override in a subclass to read or manipulate the response or metadata after it
+ is returned by the ErrorStatsService server but before it is returned to user code.
+
+ We recommend only using this `post_delete_events_with_metadata`
+ interceptor in new development instead of the `post_delete_events` interceptor.
+ When both interceptors are used, this `post_delete_events_with_metadata` interceptor runs after the
+ `post_delete_events` interceptor. The (possibly modified) response returned by
+ `post_delete_events` will be passed to
+ `post_delete_events_with_metadata`.
+ """
+ return response, metadata
+
def pre_list_events(
self,
request: error_stats_service.ListEventsRequest,
- metadata: Sequence[Tuple[str, str]],
- ) -> Tuple[error_stats_service.ListEventsRequest, Sequence[Tuple[str, str]]]:
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_stats_service.ListEventsRequest, Sequence[Tuple[str, Union[str, bytes]]]
+ ]:
"""Pre-rpc interceptor for list_events
Override in a subclass to manipulate the request or metadata
@@ -139,17 +178,45 @@ def post_list_events(
) -> error_stats_service.ListEventsResponse:
"""Post-rpc interceptor for list_events
- Override in a subclass to manipulate the response
+ DEPRECATED. Please use the `post_list_events_with_metadata`
+ interceptor instead.
+
+ Override in a subclass to read or manipulate the response
after it is returned by the ErrorStatsService server but before
- it is returned to user code.
+ it is returned to user code. This `post_list_events` interceptor runs
+ before the `post_list_events_with_metadata` interceptor.
"""
return response
+ def post_list_events_with_metadata(
+ self,
+ response: error_stats_service.ListEventsResponse,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_stats_service.ListEventsResponse, Sequence[Tuple[str, Union[str, bytes]]]
+ ]:
+ """Post-rpc interceptor for list_events
+
+ Override in a subclass to read or manipulate the response or metadata after it
+ is returned by the ErrorStatsService server but before it is returned to user code.
+
+ We recommend only using this `post_list_events_with_metadata`
+ interceptor in new development instead of the `post_list_events` interceptor.
+ When both interceptors are used, this `post_list_events_with_metadata` interceptor runs after the
+ `post_list_events` interceptor. The (possibly modified) response returned by
+ `post_list_events` will be passed to
+ `post_list_events_with_metadata`.
+ """
+ return response, metadata
+
def pre_list_group_stats(
self,
request: error_stats_service.ListGroupStatsRequest,
- metadata: Sequence[Tuple[str, str]],
- ) -> Tuple[error_stats_service.ListGroupStatsRequest, Sequence[Tuple[str, str]]]:
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_stats_service.ListGroupStatsRequest,
+ Sequence[Tuple[str, Union[str, bytes]]],
+ ]:
"""Pre-rpc interceptor for list_group_stats
Override in a subclass to manipulate the request or metadata
@@ -162,12 +229,38 @@ def post_list_group_stats(
) -> error_stats_service.ListGroupStatsResponse:
"""Post-rpc interceptor for list_group_stats
- Override in a subclass to manipulate the response
+ DEPRECATED. Please use the `post_list_group_stats_with_metadata`
+ interceptor instead.
+
+ Override in a subclass to read or manipulate the response
after it is returned by the ErrorStatsService server but before
- it is returned to user code.
+ it is returned to user code. This `post_list_group_stats` interceptor runs
+ before the `post_list_group_stats_with_metadata` interceptor.
"""
return response
+ def post_list_group_stats_with_metadata(
+ self,
+ response: error_stats_service.ListGroupStatsResponse,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ error_stats_service.ListGroupStatsResponse,
+ Sequence[Tuple[str, Union[str, bytes]]],
+ ]:
+ """Post-rpc interceptor for list_group_stats
+
+ Override in a subclass to read or manipulate the response or metadata after it
+ is returned by the ErrorStatsService server but before it is returned to user code.
+
+ We recommend only using this `post_list_group_stats_with_metadata`
+ interceptor in new development instead of the `post_list_group_stats` interceptor.
+ When both interceptors are used, this `post_list_group_stats_with_metadata` interceptor runs after the
+ `post_list_group_stats` interceptor. The (possibly modified) response returned by
+ `post_list_group_stats` will be passed to
+ `post_list_group_stats_with_metadata`.
+ """
+ return response, metadata
+
@dataclasses.dataclass
class ErrorStatsServiceRestStub:
@@ -176,8 +269,8 @@ class ErrorStatsServiceRestStub:
_interceptor: ErrorStatsServiceRestInterceptor
-class ErrorStatsServiceRestTransport(ErrorStatsServiceTransport):
- """REST backend transport for ErrorStatsService.
+class ErrorStatsServiceRestTransport(_BaseErrorStatsServiceRestTransport):
+ """REST backend synchronous transport for ErrorStatsService.
An API for retrieving and managing error statistics as well
as data for individual events.
@@ -187,7 +280,6 @@ class ErrorStatsServiceRestTransport(ErrorStatsServiceTransport):
and call it.
It sends JSON representations of protocol buffers over HTTP/1.1
-
"""
def __init__(
@@ -241,21 +333,12 @@ def __init__(
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
# credentials object
- maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
+ url_scheme=url_scheme,
api_audience=api_audience,
)
self._session = AuthorizedSession(
@@ -266,19 +349,33 @@ def __init__(
self._interceptor = interceptor or ErrorStatsServiceRestInterceptor()
self._prep_wrapped_messages(client_info)
- class _DeleteEvents(ErrorStatsServiceRestStub):
+ class _DeleteEvents(
+ _BaseErrorStatsServiceRestTransport._BaseDeleteEvents, ErrorStatsServiceRestStub
+ ):
def __hash__(self):
- return hash("DeleteEvents")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("ErrorStatsServiceRestTransport.DeleteEvents")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -286,7 +383,7 @@ def __call__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> error_stats_service.DeleteEventsResponse:
r"""Call the delete events method over HTTP.
@@ -296,8 +393,10 @@ def __call__(
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.error_stats_service.DeleteEventsResponse:
@@ -306,42 +405,55 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "delete",
- "uri": "/v1beta1/{project_name=projects/*}/events",
- },
- {
- "method": "delete",
- "uri": "/v1beta1/{project_name=projects/*/locations/*}/events",
- },
- ]
- request, metadata = self._interceptor.pre_delete_events(request, metadata)
- pb_request = error_stats_service.DeleteEventsRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
+ http_options = (
+ _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_http_options()
+ )
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ request, metadata = self._interceptor.pre_delete_events(request, metadata)
+ transcoded_request = _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
- query_params["$alt"] = "json;enum-encoding=int"
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.DeleteEvents",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": "DeleteEvents",
+ "httpRequest": http_request,
+ "metadata": http_request["headers"],
+ },
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = ErrorStatsServiceRestTransport._DeleteEvents._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -354,24 +466,64 @@ def __call__(
pb_resp = error_stats_service.DeleteEventsResponse.pb(resp)
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
resp = self._interceptor.post_delete_events(resp)
+ response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+ resp, _ = self._interceptor.post_delete_events_with_metadata(
+ resp, response_metadata
+ )
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ try:
+ response_payload = error_stats_service.DeleteEventsResponse.to_json(
+ response
+ )
+ except:
+ response_payload = None
+ http_response = {
+ "payload": response_payload,
+ "headers": dict(response.headers),
+ "status": response.status_code,
+ }
+ _LOGGER.debug(
+ "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.delete_events",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": "DeleteEvents",
+ "metadata": http_response["headers"],
+ "httpResponse": http_response,
+ },
+ )
return resp
- class _ListEvents(ErrorStatsServiceRestStub):
+ class _ListEvents(
+ _BaseErrorStatsServiceRestTransport._BaseListEvents, ErrorStatsServiceRestStub
+ ):
def __hash__(self):
- return hash("ListEvents")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- "groupId": "",
- }
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("ErrorStatsServiceRestTransport.ListEvents")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -379,7 +531,7 @@ def __call__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> error_stats_service.ListEventsResponse:
r"""Call the list events method over HTTP.
@@ -390,8 +542,10 @@ def __call__(
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.error_stats_service.ListEventsResponse:
@@ -400,42 +554,55 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1beta1/{project_name=projects/*}/events",
- },
- {
- "method": "get",
- "uri": "/v1beta1/{project_name=projects/*/locations/*}/events",
- },
- ]
- request, metadata = self._interceptor.pre_list_events(request, metadata)
- pb_request = error_stats_service.ListEventsRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
+ http_options = (
+ _BaseErrorStatsServiceRestTransport._BaseListEvents._get_http_options()
+ )
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ request, metadata = self._interceptor.pre_list_events(request, metadata)
+ transcoded_request = _BaseErrorStatsServiceRestTransport._BaseListEvents._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseErrorStatsServiceRestTransport._BaseListEvents._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
- query_params["$alt"] = "json;enum-encoding=int"
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.ListEvents",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": "ListEvents",
+ "httpRequest": http_request,
+ "metadata": http_request["headers"],
+ },
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = ErrorStatsServiceRestTransport._ListEvents._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -448,22 +615,65 @@ def __call__(
pb_resp = error_stats_service.ListEventsResponse.pb(resp)
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
resp = self._interceptor.post_list_events(resp)
+ response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+ resp, _ = self._interceptor.post_list_events_with_metadata(
+ resp, response_metadata
+ )
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ try:
+ response_payload = error_stats_service.ListEventsResponse.to_json(
+ response
+ )
+ except:
+ response_payload = None
+ http_response = {
+ "payload": response_payload,
+ "headers": dict(response.headers),
+ "status": response.status_code,
+ }
+ _LOGGER.debug(
+ "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.list_events",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": "ListEvents",
+ "metadata": http_response["headers"],
+ "httpResponse": http_response,
+ },
+ )
return resp
- class _ListGroupStats(ErrorStatsServiceRestStub):
+ class _ListGroupStats(
+ _BaseErrorStatsServiceRestTransport._BaseListGroupStats,
+ ErrorStatsServiceRestStub,
+ ):
def __hash__(self):
- return hash("ListGroupStats")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("ErrorStatsServiceRestTransport.ListGroupStats")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+ return response
def __call__(
self,
@@ -471,7 +681,7 @@ def __call__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> error_stats_service.ListGroupStatsResponse:
r"""Call the list group stats method over HTTP.
@@ -481,8 +691,10 @@ def __call__(
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.error_stats_service.ListGroupStatsResponse:
@@ -491,44 +703,57 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "get",
- "uri": "/v1beta1/{project_name=projects/*}/groupStats",
- },
- {
- "method": "get",
- "uri": "/v1beta1/{project_name=projects/*/locations/*}/groupStats",
- },
- ]
+ http_options = (
+ _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_http_options()
+ )
+
request, metadata = self._interceptor.pre_list_group_stats(
request, metadata
)
- pb_request = error_stats_service.ListGroupStatsRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
+ transcoded_request = _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_transcoded_request(
+ http_options, request
+ )
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
- query_params["$alt"] = "json;enum-encoding=int"
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.ListGroupStats",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": "ListGroupStats",
+ "httpRequest": http_request,
+ "metadata": http_request["headers"],
+ },
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
+ response = ErrorStatsServiceRestTransport._ListGroupStats._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -541,7 +766,35 @@ def __call__(
pb_resp = error_stats_service.ListGroupStatsResponse.pb(resp)
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
resp = self._interceptor.post_list_group_stats(resp)
+ response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+ resp, _ = self._interceptor.post_list_group_stats_with_metadata(
+ resp, response_metadata
+ )
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ try:
+ response_payload = (
+ error_stats_service.ListGroupStatsResponse.to_json(response)
+ )
+ except:
+ response_payload = None
+ http_response = {
+ "payload": response_payload,
+ "headers": dict(response.headers),
+ "status": response.status_code,
+ }
+ _LOGGER.debug(
+ "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.list_group_stats",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService",
+ "rpcName": "ListGroupStats",
+ "metadata": http_response["headers"],
+ "httpResponse": http_response,
+ },
+ )
return resp
@property
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py
new file mode 100644
index 00000000..77269984
--- /dev/null
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.errorreporting_v1beta1.types import error_stats_service
+
+
+class _BaseErrorStatsServiceRestTransport(ErrorStatsServiceTransport):
+ """Base REST backend transport for ErrorStatsService.
+
+ Note: This class is not meant to be used directly. Use its sync and
+ async sub-classes instead.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "clouderrorreporting.googleapis.com",
+ credentials: Optional[Any] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'clouderrorreporting.googleapis.com').
+ credentials (Optional[Any]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ class _BaseDeleteEvents:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "delete",
+ "uri": "/v1beta1/{project_name=projects/*}/events",
+ },
+ {
+ "method": "delete",
+ "uri": "/v1beta1/{project_name=projects/*/locations/*}/events",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = error_stats_service.DeleteEventsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseListEvents:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ "groupId": "",
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1beta1/{project_name=projects/*}/events",
+ },
+ {
+ "method": "get",
+ "uri": "/v1beta1/{project_name=projects/*/locations/*}/events",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = error_stats_service.ListEventsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseErrorStatsServiceRestTransport._BaseListEvents._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseListGroupStats:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1beta1/{project_name=projects/*}/groupStats",
+ },
+ {
+ "method": "get",
+ "uri": "/v1beta1/{project_name=projects/*/locations/*}/groupStats",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = error_stats_service.ListGroupStatsRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+
+__all__ = ("_BaseErrorStatsServiceRestTransport",)
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py
index d9e40c58..2e3c1322 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py
index 6c0ef660..33c90024 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging as std_logging
from collections import OrderedDict
import re
from typing import (
@@ -36,6 +37,7 @@
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
@@ -48,6 +50,15 @@
from .transports.grpc_asyncio import ReportErrorsServiceGrpcAsyncIOTransport
from .client import ReportErrorsServiceClient
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
class ReportErrorsServiceAsyncClient:
"""An API for reporting error events."""
@@ -255,6 +266,28 @@ def __init__(
client_info=client_info,
)
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceAsyncClient`.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "universeDomain": getattr(
+ self._client._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._client._transport, "_credentials")
+ else {
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "credentialsType": None,
+ },
+ )
+
async def report_error_event(
self,
request: Optional[
@@ -265,7 +298,7 @@ async def report_error_event(
event: Optional[report_errors_service.ReportedErrorEvent] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> report_errors_service.ReportErrorEventResponse:
r"""Report an individual error event and record the event to a log.
@@ -345,8 +378,10 @@ async def sample_report_error_event():
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse:
@@ -358,7 +393,10 @@ async def sample_report_error_event():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name, event])
+ flattened_params = [project_name, event]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -416,5 +454,8 @@ async def __aexit__(self, exc_type, exc, tb):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
__all__ = ("ReportErrorsServiceAsyncClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py
index 212c5811..9dc31d29 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,6 +14,9 @@
# limitations under the License.
#
from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
import os
import re
from typing import (
@@ -42,12 +45,22 @@
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import ReportErrorsServiceGrpcTransport
@@ -437,52 +450,45 @@ def _get_universe_domain(
raise ValueError("Universe Domain cannot be an empty string.")
return universe_domain
- @staticmethod
- def _compare_universes(
- client_universe: str, credentials: ga_credentials.Credentials
- ) -> bool:
- """Returns True iff the universe domains used by the client and credentials match.
-
- Args:
- client_universe (str): The universe domain configured via the client options.
- credentials (ga_credentials.Credentials): The credentials being used in the client.
+ def _validate_universe_domain(self):
+ """Validates client's and credentials' universe domains are consistent.
Returns:
- bool: True iff client_universe matches the universe in credentials.
+ bool: True iff the configured universe domain is valid.
Raises:
- ValueError: when client_universe does not match the universe in credentials.
+ ValueError: If the configured universe domain is not valid.
"""
- default_universe = ReportErrorsServiceClient._DEFAULT_UNIVERSE
- credentials_universe = getattr(credentials, "universe_domain", default_universe)
-
- if client_universe != credentials_universe:
- raise ValueError(
- "The configured universe domain "
- f"({client_universe}) does not match the universe domain "
- f"found in the credentials ({credentials_universe}). "
- "If you haven't configured the universe domain explicitly, "
- f"`{default_universe}` is the default."
- )
+ # NOTE (b/349488459): universe validation is disabled until further notice.
return True
- def _validate_universe_domain(self):
- """Validates client's and credentials' universe domains are consistent.
-
- Returns:
- bool: True iff the configured universe domain is valid.
+ def _add_cred_info_for_auth_errors(
+ self, error: core_exceptions.GoogleAPICallError
+ ) -> None:
+ """Adds credential info string to error details for 401/403/404 errors.
- Raises:
- ValueError: If the configured universe domain is not valid.
+ Args:
+ error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
"""
- self._is_universe_domain_valid = (
- self._is_universe_domain_valid
- or ReportErrorsServiceClient._compare_universes(
- self.universe_domain, self.transport._credentials
- )
- )
- return self._is_universe_domain_valid
+ if error.code not in [
+ HTTPStatus.UNAUTHORIZED,
+ HTTPStatus.FORBIDDEN,
+ HTTPStatus.NOT_FOUND,
+ ]:
+ return
+
+ cred = self._transport._credentials
+
+ # get_cred_info is only available in google-auth>=2.35.0
+ if not hasattr(cred, "get_cred_info"):
+ return
+
+ # ignore the type check since pypy test fails when get_cred_info
+ # is not available
+ cred_info = cred.get_cred_info() # type: ignore
+ if cred_info and hasattr(error._details, "append"):
+ error._details.append(json.dumps(cred_info))
@property
def api_endpoint(self):
@@ -592,6 +598,10 @@ def __init__(
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
@@ -658,6 +668,29 @@ def __init__(
api_audience=self._client_options.api_audience,
)
+ if "async" not in str(self._transport):
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceClient`.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "universeDomain": getattr(
+ self._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._transport, "_credentials")
+ else {
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "credentialsType": None,
+ },
+ )
+
def report_error_event(
self,
request: Optional[
@@ -668,7 +701,7 @@ def report_error_event(
event: Optional[report_errors_service.ReportedErrorEvent] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> report_errors_service.ReportErrorEventResponse:
r"""Report an individual error event and record the event to a log.
@@ -748,8 +781,10 @@ def sample_report_error_event():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse:
@@ -761,7 +796,10 @@ def sample_report_error_event():
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_name, event])
+ flattened_params = [project_name, event]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -823,5 +861,7 @@ def __exit__(self, type, value, traceback):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
__all__ = ("ReportErrorsServiceClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst
new file mode 100644
index 00000000..d70e9010
--- /dev/null
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`ReportErrorsServiceTransport` is the ABC for all transports.
+- public child `ReportErrorsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `ReportErrorsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseReportErrorsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `ReportErrorsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py
index bf8256bc..83aa46c6 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py
index 9bb43400..31f69093 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
from google.cloud.errorreporting_v1beta1.types import report_errors_service
@@ -32,6 +33,9 @@
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ReportErrorsServiceTransport(abc.ABC):
"""Abstract transport class for ReportErrorsService."""
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py
index 0cbcc619..023d0f0f 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -21,12 +24,89 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from .base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+ # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ReportErrorsServiceGrpcTransport(ReportErrorsServiceTransport):
"""gRPC backend transport for ReportErrorsService.
@@ -180,7 +260,12 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientInterceptor()
+ self._logged_channel = grpc.intercept_channel(
+ self._grpc_channel, self._interceptor
+ )
+
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
@@ -278,7 +363,7 @@ def report_error_event(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "report_error_event" not in self._stubs:
- self._stubs["report_error_event"] = self.grpc_channel.unary_unary(
+ self._stubs["report_error_event"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent",
request_serializer=report_errors_service.ReportErrorEventRequest.serialize,
response_deserializer=report_errors_service.ReportErrorEventResponse.deserialize,
@@ -286,7 +371,7 @@ def report_error_event(
return self._stubs["report_error_event"]
def close(self):
- self.grpc_channel.close()
+ self._logged_channel.close()
@property
def kind(self) -> str:
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py
index 5b3612ba..18f942ce 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
+import json
+import pickle
+import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
@@ -22,14 +26,93 @@
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from .base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ReportErrorsServiceGrpcTransport
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+ grpc.aio.UnaryUnaryClientInterceptor
+): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ReportErrorsServiceGrpcAsyncIOTransport(ReportErrorsServiceTransport):
"""gRPC AsyncIO backend transport for ReportErrorsService.
@@ -226,7 +309,13 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@property
@@ -282,7 +371,7 @@ def report_error_event(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "report_error_event" not in self._stubs:
- self._stubs["report_error_event"] = self.grpc_channel.unary_unary(
+ self._stubs["report_error_event"] = self._logged_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent",
request_serializer=report_errors_service.ReportErrorEventRequest.serialize,
response_deserializer=report_errors_service.ReportErrorEventResponse.deserialize,
@@ -292,15 +381,24 @@ def report_error_event(
def _prep_wrapped_messages(self, client_info):
"""Precompute the wrapped methods, overriding the base class method to use async wrappers."""
self._wrapped_methods = {
- self.report_error_event: gapic_v1.method_async.wrap_method(
+ self.report_error_event: self._wrap_method(
self.report_error_event,
default_timeout=None,
client_info=client_info,
),
}
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
- return self.grpc_channel.close()
+ return self._logged_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
__all__ = ("ReportErrorsServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py
index 64aa2ae0..98c63125 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,46 +13,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging
+import json # type: ignore
from google.auth.transport.requests import AuthorizedSession # type: ignore
-import json # type: ignore
-import grpc # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
-from google.api_core import path_template
from google.api_core import gapic_v1
+import google.protobuf
from google.protobuf import json_format
+
from requests import __version__ as requests_version
import dataclasses
-import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings
+
+from google.cloud.errorreporting_v1beta1.types import report_errors_service
+
+
+from .rest_base import _BaseReportErrorsServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+try:
+ from google.api_core import client_logging # type: ignore
-from google.cloud.errorreporting_v1beta1.types import report_errors_service
-
-from .base import (
- ReportErrorsServiceTransport,
- DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO,
-)
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+_LOGGER = logging.getLogger(__name__)
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
grpc_version=None,
- rest_version=requests_version,
+ rest_version=f"requests@{requests_version}",
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ReportErrorsServiceRestInterceptor:
"""Interceptor for ReportErrorsService.
@@ -86,9 +95,10 @@ def post_report_error_event(self, response):
def pre_report_error_event(
self,
request: report_errors_service.ReportErrorEventRequest,
- metadata: Sequence[Tuple[str, str]],
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
) -> Tuple[
- report_errors_service.ReportErrorEventRequest, Sequence[Tuple[str, str]]
+ report_errors_service.ReportErrorEventRequest,
+ Sequence[Tuple[str, Union[str, bytes]]],
]:
"""Pre-rpc interceptor for report_error_event
@@ -102,12 +112,38 @@ def post_report_error_event(
) -> report_errors_service.ReportErrorEventResponse:
"""Post-rpc interceptor for report_error_event
- Override in a subclass to manipulate the response
+ DEPRECATED. Please use the `post_report_error_event_with_metadata`
+ interceptor instead.
+
+ Override in a subclass to read or manipulate the response
after it is returned by the ReportErrorsService server but before
- it is returned to user code.
+ it is returned to user code. This `post_report_error_event` interceptor runs
+ before the `post_report_error_event_with_metadata` interceptor.
"""
return response
+ def post_report_error_event_with_metadata(
+ self,
+ response: report_errors_service.ReportErrorEventResponse,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]],
+ ) -> Tuple[
+ report_errors_service.ReportErrorEventResponse,
+ Sequence[Tuple[str, Union[str, bytes]]],
+ ]:
+ """Post-rpc interceptor for report_error_event
+
+ Override in a subclass to read or manipulate the response or metadata after it
+ is returned by the ReportErrorsService server but before it is returned to user code.
+
+ We recommend only using this `post_report_error_event_with_metadata`
+ interceptor in new development instead of the `post_report_error_event` interceptor.
+ When both interceptors are used, this `post_report_error_event_with_metadata` interceptor runs after the
+ `post_report_error_event` interceptor. The (possibly modified) response returned by
+ `post_report_error_event` will be passed to
+ `post_report_error_event_with_metadata`.
+ """
+ return response, metadata
+
@dataclasses.dataclass
class ReportErrorsServiceRestStub:
@@ -116,8 +152,8 @@ class ReportErrorsServiceRestStub:
_interceptor: ReportErrorsServiceRestInterceptor
-class ReportErrorsServiceRestTransport(ReportErrorsServiceTransport):
- """REST backend transport for ReportErrorsService.
+class ReportErrorsServiceRestTransport(_BaseReportErrorsServiceRestTransport):
+ """REST backend synchronous transport for ReportErrorsService.
An API for reporting error events.
@@ -126,7 +162,6 @@ class ReportErrorsServiceRestTransport(ReportErrorsServiceTransport):
and call it.
It sends JSON representations of protocol buffers over HTTP/1.1
-
"""
def __init__(
@@ -180,21 +215,12 @@ def __init__(
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
# credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
+ url_scheme=url_scheme,
api_audience=api_audience,
)
self._session = AuthorizedSession(
@@ -205,19 +231,35 @@ def __init__(
self._interceptor = interceptor or ReportErrorsServiceRestInterceptor()
self._prep_wrapped_messages(client_info)
- class _ReportErrorEvent(ReportErrorsServiceRestStub):
+ class _ReportErrorEvent(
+ _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent,
+ ReportErrorsServiceRestStub,
+ ):
def __hash__(self):
- return hash("ReportErrorEvent")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {
- k: v
- for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
- if k not in message_dict
- }
+ return hash("ReportErrorsServiceRestTransport.ReportErrorEvent")
+
+ @staticmethod
+ def _get_response(
+ host,
+ metadata,
+ query_params,
+ session,
+ timeout,
+ transcoded_request,
+ body=None,
+ ):
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(session, method)(
+ "{host}{uri}".format(host=host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+ return response
def __call__(
self,
@@ -225,7 +267,7 @@ def __call__(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> report_errors_service.ReportErrorEventResponse:
r"""Call the report error event method over HTTP.
@@ -236,8 +278,10 @@ def __call__(
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.report_errors_service.ReportErrorEventResponse:
@@ -247,47 +291,62 @@ def __call__(
"""
- http_options: List[Dict[str, str]] = [
- {
- "method": "post",
- "uri": "/v1beta1/{project_name=projects/*}/events:report",
- "body": "event",
- },
- ]
+ http_options = (
+ _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_http_options()
+ )
+
request, metadata = self._interceptor.pre_report_error_event(
request, metadata
)
- pb_request = report_errors_service.ReportErrorEventRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
-
- # Jsonify the request body
+ transcoded_request = _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_transcoded_request(
+ http_options, request
+ )
- body = json_format.MessageToJson(
- transcoded_request["body"], use_integers_for_enums=True
+ body = _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_request_body_json(
+ transcoded_request
)
- uri = transcoded_request["uri"]
- method = transcoded_request["method"]
# Jsonify the query params
- query_params = json.loads(
- json_format.MessageToJson(
- transcoded_request["query_params"],
- use_integers_for_enums=True,
- )
+ query_params = _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_query_params_json(
+ transcoded_request
)
- query_params.update(self._get_unset_required_fields(query_params))
- query_params["$alt"] = "json;enum-encoding=int"
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceClient.ReportErrorEvent",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "rpcName": "ReportErrorEvent",
+ "httpRequest": http_request,
+ "metadata": http_request["headers"],
+ },
+ )
# Send the request
- headers = dict(metadata)
- headers["Content-Type"] = "application/json"
- response = getattr(self._session, method)(
- "{host}{uri}".format(host=self._host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
+ response = ReportErrorsServiceRestTransport._ReportErrorEvent._get_response(
+ self._host,
+ metadata,
+ query_params,
+ self._session,
+ timeout,
+ transcoded_request,
+ body,
)
# In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
@@ -300,7 +359,35 @@ def __call__(
pb_resp = report_errors_service.ReportErrorEventResponse.pb(resp)
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
resp = self._interceptor.post_report_error_event(resp)
+ response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+ resp, _ = self._interceptor.post_report_error_event_with_metadata(
+ resp, response_metadata
+ )
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ try:
+ response_payload = (
+ report_errors_service.ReportErrorEventResponse.to_json(response)
+ )
+ except:
+ response_payload = None
+ http_response = {
+ "payload": response_payload,
+ "headers": dict(response.headers),
+ "status": response.status_code,
+ }
+ _LOGGER.debug(
+ "Received response for google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceClient.report_error_event",
+ extra={
+ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
+ "rpcName": "ReportErrorEvent",
+ "metadata": http_response["headers"],
+ "httpResponse": http_response,
+ },
+ )
return resp
@property
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py
new file mode 100644
index 00000000..f6495841
--- /dev/null
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.errorreporting_v1beta1.types import report_errors_service
+
+
+class _BaseReportErrorsServiceRestTransport(ReportErrorsServiceTransport):
+ """Base REST backend transport for ReportErrorsService.
+
+ Note: This class is not meant to be used directly. Use its sync and
+ async sub-classes instead.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "clouderrorreporting.googleapis.com",
+ credentials: Optional[Any] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'clouderrorreporting.googleapis.com').
+ credentials (Optional[Any]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ class _BaseReportErrorEvent:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1beta1/{project_name=projects/*}/events:report",
+ "body": "event",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = report_errors_service.ReportErrorEventRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+
+__all__ = ("_BaseReportErrorsServiceRestTransport",)
diff --git a/google/cloud/errorreporting_v1beta1/types/__init__.py b/google/cloud/errorreporting_v1beta1/types/__init__.py
index 42ab63d4..4acdb9a1 100644
--- a/google/cloud/errorreporting_v1beta1/types/__init__.py
+++ b/google/cloud/errorreporting_v1beta1/types/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py
index 1278fa58..84320a89 100644
--- a/google/cloud/errorreporting_v1beta1/types/common.py
+++ b/google/cloud/errorreporting_v1beta1/types/common.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py
index cfd82fa7..be126768 100644
--- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py
+++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py
index 93158ab5..0285d9a8 100644
--- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py
+++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py
index 87c1b14d..ebd33856 100644
--- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py
+++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/noxfile.py b/noxfile.py
index 7540caad..bc66951c 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -34,7 +34,15 @@
DEFAULT_PYTHON_VERSION = "3.8"
-UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+UNIT_TEST_PYTHON_VERSIONS: List[str] = [
+ "3.7",
+ "3.8",
+ "3.9",
+ "3.10",
+ "3.11",
+ "3.12",
+ "3.13",
+]
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
"asyncmock",
@@ -62,7 +70,6 @@
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
nox.options.sessions = [
"unit",
"system",
@@ -71,6 +78,7 @@
"lint_setup_py",
"blacken",
"docs",
+ "docfx",
"format",
]
@@ -167,7 +175,7 @@ def install_unittest_dependencies(session, *constraints):
def unit(session, protobuf_implementation):
# Install all test dependencies, then install this package in-place.
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
session.skip("cpp implementation is not supported in python 3.11+")
constraints_path = str(
@@ -367,7 +375,7 @@ def docfx(session):
)
-@nox.session(python="3.12")
+@nox.session(python="3.13")
@nox.parametrize(
"protobuf_implementation",
["python", "upb", "cpp"],
@@ -375,7 +383,7 @@ def docfx(session):
def prerelease_deps(session, protobuf_implementation):
"""Run all tests with prerelease versions of dependencies installed."""
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
session.skip("cpp implementation is not supported in python 3.11+")
# Install all dependencies
diff --git a/owlbot.py b/owlbot.py
index ded8fbe7..0f851388 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -79,7 +79,7 @@
# --------------------------------------------------------------------------
# add shared environment variables to test configs
-tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"]
+tracked_subdirs = ["continuous", "presubmit", "samples"]
for subdir in tracked_subdirs:
for path, subdirs, files in os.walk(f".kokoro/{subdir}"):
for name in files:
diff --git a/renovate.json b/renovate.json
index 39b2a0ec..c7875c46 100644
--- a/renovate.json
+++ b/renovate.json
@@ -5,7 +5,7 @@
":preserveSemverRanges",
":disableDependencyDashboard"
],
- "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
"pip_requirements": {
"fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
}
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py
index dc84897a..4a0dabf5 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py
index e260be1f..79c76d18 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py
index 62a82368..58a7d5b3 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py
index 7239a97f..9fa53da6 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py
index 4bf09138..6479d386 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py
index 6194de23..60dcebe9 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py
index 49f25ac7..3b143dca 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py
index 99d4ca32..273d4a44 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py
index 8ffdf643..92ef3ed1 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py
index a696194b..ed6b7f5a 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py
index 5dc4e264..b2c9d42e 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py
index f75f5591..b413c1f6 100644
--- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py
+++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json
index f8a87619..226f3879 100644
--- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json
+++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-error-reporting",
- "version": "1.11.1"
+ "version": "1.12.0"
},
"snippets": [
{
@@ -47,7 +47,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup",
@@ -127,7 +127,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup",
@@ -208,7 +208,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup",
@@ -288,7 +288,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup",
@@ -369,7 +369,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse",
@@ -449,7 +449,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse",
@@ -534,7 +534,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsAsyncPager",
@@ -618,7 +618,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsPager",
@@ -703,7 +703,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsAsyncPager",
@@ -787,7 +787,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsPager",
@@ -872,7 +872,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse",
@@ -956,7 +956,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse",
diff --git a/scripts/fixup_errorreporting_v1beta1_keywords.py b/scripts/fixup_errorreporting_v1beta1_keywords.py
index 69a5a229..60f94ffc 100644
--- a/scripts/fixup_errorreporting_v1beta1_keywords.py
+++ b/scripts/fixup_errorreporting_v1beta1_keywords.py
@@ -1,6 +1,6 @@
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 05235008..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[bdist_wheel]
-universal = 1
diff --git a/setup.py b/setup.py
index 88311343..530e3c7f 100644
--- a/setup.py
+++ b/setup.py
@@ -38,14 +38,15 @@
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-cloud-logging>=1.14.0, <4.0.0dev",
+ "google-cloud-logging>=1.14.0, <4.0.0",
# Exclude incompatible versions of `google-auth`
# See https://github.com/googleapis/google-cloud-python/issues/12364
- "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
- "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
- "proto-plus >= 1.22.0, <2.0.0dev",
- "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
- "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0",
+ "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+ "proto-plus >= 1.22.0, <2.0.0",
+ "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'",
+ "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'",
+ "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
url = "https://github.com/googleapis/python-error-reporting"
@@ -82,6 +83,7 @@
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
"Operating System :: OS Independent",
"Topic :: Internet",
],
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
new file mode 100644
index 00000000..c20a7781
--- /dev/null
+++ b/testing/constraints-3.13.txt
@@ -0,0 +1,11 @@
+# We use the constraints file for the latest Python version
+# (currently this file) to check that the latest
+# major versions of dependencies are supported in setup.py.
+# List all library dependencies and extras in this file.
+# Require the latest major version be installed for each dependency.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# Then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+proto-plus>=1
+protobuf>=6
diff --git a/tests/__init__.py b/tests/__init__.py
index 8f6cf068..cbf94b28 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
index 8f6cf068..cbf94b28 100644
--- a/tests/unit/__init__.py
+++ b/tests/unit/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py
index 8f6cf068..cbf94b28 100644
--- a/tests/unit/gapic/__init__.py
+++ b/tests/unit/gapic/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/errorreporting_v1beta1/__init__.py b/tests/unit/gapic/errorreporting_v1beta1/__init__.py
index 8f6cf068..cbf94b28 100644
--- a/tests/unit/gapic/errorreporting_v1beta1/__init__.py
+++ b/tests/unit/gapic/errorreporting_v1beta1/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py
index e6e0089b..67adff43 100644
--- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py
+++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
import grpc
from grpc.experimental import aio
-from collections.abc import Iterable
+from collections.abc import Iterable, AsyncIterable
from google.protobuf import json_format
import json
import math
@@ -37,6 +37,13 @@
from requests.sessions import Session
from google.protobuf import json_format
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
@@ -59,10 +66,32 @@
import google.auth
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -312,83 +341,46 @@ def test__get_universe_domain():
@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
+ "error_code,cred_info_json,show_cred_info",
[
- (ErrorGroupServiceClient, transports.ErrorGroupServiceGrpcTransport, "grpc"),
- (ErrorGroupServiceClient, transports.ErrorGroupServiceRestTransport, "rest"),
+ (401, CRED_INFO_JSON, True),
+ (403, CRED_INFO_JSON, True),
+ (404, CRED_INFO_JSON, True),
+ (500, CRED_INFO_JSON, False),
+ (401, None, False),
+ (403, None, False),
+ (404, None, False),
+ (500, None, False),
],
)
-def test__validate_universe_domain(client_class, transport_class, transport_name):
- client = client_class(
- transport=transport_class(credentials=ga_credentials.AnonymousCredentials())
- )
- assert client._validate_universe_domain() == True
-
- # Test the case when universe is already validated.
- assert client._validate_universe_domain() == True
-
- if transport_name == "grpc":
- # Test the case where credentials are provided by the
- # `local_channel_credentials`. The default universes in both match.
- channel = grpc.secure_channel(
- "http://localhost/", grpc.local_channel_credentials()
- )
- client = client_class(transport=transport_class(channel=channel))
- assert client._validate_universe_domain() == True
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+ cred = mock.Mock(["get_cred_info"])
+ cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+ client = ErrorGroupServiceClient(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ if show_cred_info:
+ assert error.details == ["foo", CRED_INFO_STRING]
+ else:
+ assert error.details == ["foo"]
- # Test the case where credentials do not exist: e.g. a transport is provided
- # with no credentials. Validation should still succeed because there is no
- # mismatch with non-existent credentials.
- channel = grpc.secure_channel(
- "http://localhost/", grpc.local_channel_credentials()
- )
- transport = transport_class(channel=channel)
- transport._credentials = None
- client = client_class(transport=transport)
- assert client._validate_universe_domain() == True
- # TODO: This is needed to cater for older versions of google-auth
- # Make this test unconditional once the minimum supported version of
- # google-auth becomes 2.23.0 or higher.
- google_auth_major, google_auth_minor = [
- int(part) for part in google.auth.__version__.split(".")[0:2]
- ]
- if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
- credentials = ga_credentials.AnonymousCredentials()
- credentials._universe_domain = "foo.com"
- # Test the case when there is a universe mismatch from the credentials.
- client = client_class(transport=transport_class(credentials=credentials))
- with pytest.raises(ValueError) as excinfo:
- client._validate_universe_domain()
- assert (
- str(excinfo.value)
- == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
- )
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+ cred = mock.Mock([])
+ assert not hasattr(cred, "get_cred_info")
+ client = ErrorGroupServiceClient(credentials=cred)
+ client._transport._credentials = cred
- # Test the case when there is a universe mismatch from the client.
- #
- # TODO: Make this test unconditional once the minimum supported version of
- # google-api-core becomes 2.15.0 or higher.
- api_core_major, api_core_minor = [
- int(part) for part in api_core_version.__version__.split(".")[0:2]
- ]
- if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
- client = client_class(
- client_options={"universe_domain": "bar.com"},
- transport=transport_class(
- credentials=ga_credentials.AnonymousCredentials(),
- ),
- )
- with pytest.raises(ValueError) as excinfo:
- client._validate_universe_domain()
- assert (
- str(excinfo.value)
- == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
- )
+ error = core_exceptions.GoogleAPICallError("message", details=[])
+ error.code = error_code
- # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
- with pytest.raises(ValueError):
- client._compare_universes("foo.bar", None)
+ client._add_cred_info_for_auth_errors(error)
+ assert error.details == []
@pytest.mark.parametrize(
@@ -1202,25 +1194,6 @@ def test_get_group(request_type, transport: str = "grpc"):
assert response.resolution_status == common.ResolutionStatus.OPEN
-def test_get_group_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_group), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.get_group()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_group_service.GetGroupRequest()
-
-
def test_get_group_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1284,38 +1257,13 @@ def test_get_group_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_get_group_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_group), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- common.ErrorGroup(
- name="name_value",
- group_id="group_id_value",
- resolution_status=common.ResolutionStatus.OPEN,
- )
- )
- response = await client.get_group()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_group_service.GetGroupRequest()
-
-
@pytest.mark.asyncio
async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1354,7 +1302,7 @@ async def test_get_group_async(
transport: str = "grpc_asyncio", request_type=error_group_service.GetGroupRequest
):
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1424,7 +1372,7 @@ def test_get_group_field_headers():
@pytest.mark.asyncio
async def test_get_group_field_headers_async():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1492,7 +1440,7 @@ def test_get_group_flattened_error():
@pytest.mark.asyncio
async def test_get_group_flattened_async():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1519,7 +1467,7 @@ async def test_get_group_flattened_async():
@pytest.mark.asyncio
async def test_get_group_flattened_error_async():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1571,25 +1519,6 @@ def test_update_group(request_type, transport: str = "grpc"):
assert response.resolution_status == common.ResolutionStatus.OPEN
-def test_update_group_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.update_group), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.update_group()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_group_service.UpdateGroupRequest()
-
-
def test_update_group_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1649,31 +1578,6 @@ def test_update_group_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_update_group_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.update_group), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- common.ErrorGroup(
- name="name_value",
- group_id="group_id_value",
- resolution_status=common.ResolutionStatus.OPEN,
- )
- )
- response = await client.update_group()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_group_service.UpdateGroupRequest()
-
-
@pytest.mark.asyncio
async def test_update_group_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1682,7 +1586,7 @@ async def test_update_group_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1721,7 +1625,7 @@ async def test_update_group_async(
transport: str = "grpc_asyncio", request_type=error_group_service.UpdateGroupRequest
):
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1791,7 +1695,7 @@ def test_update_group_field_headers():
@pytest.mark.asyncio
async def test_update_group_field_headers_async():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1859,7 +1763,7 @@ def test_update_group_flattened_error():
@pytest.mark.asyncio
async def test_update_group_flattened_async():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1886,7 +1790,7 @@ async def test_update_group_flattened_async():
@pytest.mark.asyncio
async def test_update_group_flattened_error_async():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1898,50 +1802,6 @@ async def test_update_group_flattened_error_async():
)
-@pytest.mark.parametrize(
- "request_type",
- [
- error_group_service.GetGroupRequest,
- dict,
- ],
-)
-def test_get_group_rest(request_type):
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"group_name": "projects/sample1/groups/sample2"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = common.ErrorGroup(
- name="name_value",
- group_id="group_id_value",
- resolution_status=common.ResolutionStatus.OPEN,
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = common.ErrorGroup.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.get_group(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, common.ErrorGroup)
- assert response.name == "name_value"
- assert response.group_id == "group_id_value"
- assert response.resolution_status == common.ResolutionStatus.OPEN
-
-
def test_get_group_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -2044,6 +1904,7 @@ def test_get_group_rest_required_fields(
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
response = client.get_group(request)
@@ -2061,85 +1922,6 @@ def test_get_group_rest_unset_required_fields():
assert set(unset_fields) == (set(()) & set(("groupName",)))
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_group_rest_interceptors(null_interceptor):
- transport = transports.ErrorGroupServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.ErrorGroupServiceRestInterceptor(),
- )
- client = ErrorGroupServiceClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.ErrorGroupServiceRestInterceptor, "post_get_group"
- ) as post, mock.patch.object(
- transports.ErrorGroupServiceRestInterceptor, "pre_get_group"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = error_group_service.GetGroupRequest.pb(
- error_group_service.GetGroupRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = common.ErrorGroup.to_json(common.ErrorGroup())
-
- request = error_group_service.GetGroupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = common.ErrorGroup()
-
- client.get_group(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_get_group_rest_bad_request(
- transport: str = "rest", request_type=error_group_service.GetGroupRequest
-):
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"group_name": "projects/sample1/groups/sample2"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.get_group(request)
-
-
def test_get_group_rest_flattened():
client = ErrorGroupServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -2168,6 +1950,7 @@ def test_get_group_rest_flattened():
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
client.get_group(**mock_args)
@@ -2196,144 +1979,21 @@ def test_get_group_rest_flattened_error(transport: str = "rest"):
)
-def test_get_group_rest_error():
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
+def test_update_group_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
-@pytest.mark.parametrize(
- "request_type",
- [
- error_group_service.UpdateGroupRequest,
- dict,
- ],
-)
-def test_update_group_rest(request_type):
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"group": {"name": "projects/sample1/groups/sample2"}}
- request_init["group"] = {
- "name": "projects/sample1/groups/sample2",
- "group_id": "group_id_value",
- "tracking_issues": [{"url": "url_value"}],
- "resolution_status": 1,
- }
- # The version of a generated dependency at test runtime may differ from the version used during generation.
- # Delete any fields which are not present in the current runtime dependency
- # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
- # Determine if the message type is proto-plus or protobuf
- test_field = error_group_service.UpdateGroupRequest.meta.fields["group"]
-
- def get_message_fields(field):
- # Given a field which is a message (composite type), return a list with
- # all the fields of the message.
- # If the field is not a composite type, return an empty list.
- message_fields = []
-
- if hasattr(field, "message") and field.message:
- is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
- if is_field_type_proto_plus_type:
- message_fields = field.message.meta.fields.values()
- # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
- else: # pragma: NO COVER
- message_fields = field.message.DESCRIPTOR.fields
- return message_fields
-
- runtime_nested_fields = [
- (field.name, nested_field.name)
- for field in get_message_fields(test_field)
- for nested_field in get_message_fields(field)
- ]
-
- subfields_not_in_runtime = []
-
- # For each item in the sample request, create a list of sub fields which are not present at runtime
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for field, value in request_init["group"].items(): # pragma: NO COVER
- result = None
- is_repeated = False
- # For repeated fields
- if isinstance(value, list) and len(value):
- is_repeated = True
- result = value[0]
- # For fields where the type is another message
- if isinstance(value, dict):
- result = value
-
- if result and hasattr(result, "keys"):
- for subfield in result.keys():
- if (field, subfield) not in runtime_nested_fields:
- subfields_not_in_runtime.append(
- {
- "field": field,
- "subfield": subfield,
- "is_repeated": is_repeated,
- }
- )
-
- # Remove fields from the sample request which are not present in the runtime version of the dependency
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
- field = subfield_to_delete.get("field")
- field_repeated = subfield_to_delete.get("is_repeated")
- subfield = subfield_to_delete.get("subfield")
- if subfield:
- if field_repeated:
- for i in range(0, len(request_init["group"][field])):
- del request_init["group"][field][i][subfield]
- else:
- del request_init["group"][field][subfield]
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = common.ErrorGroup(
- name="name_value",
- group_id="group_id_value",
- resolution_status=common.ResolutionStatus.OPEN,
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = common.ErrorGroup.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.update_group(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, common.ErrorGroup)
- assert response.name == "name_value"
- assert response.group_id == "group_id_value"
- assert response.resolution_status == common.ResolutionStatus.OPEN
-
-
-def test_update_group_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_group in client._transport._wrapped_methods
+ # Ensure method has been cached
+ assert client._transport.update_group in client._transport._wrapped_methods
# Replace cached wrapped function with mock
mock_rpc = mock.Mock()
@@ -2415,23 +2075,579 @@ def test_update_group_rest_required_fields(
return_value = common.ErrorGroup.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.update_group(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_update_group_rest_unset_required_fields():
+ transport = transports.ErrorGroupServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials
+ )
+
+ unset_fields = transport.update_group._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("group",)))
+
+
+def test_update_group_rest_flattened():
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = common.ErrorGroup()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"group": {"name": "projects/sample1/groups/sample2"}}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ group=common.ErrorGroup(name="name_value"),
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = common.ErrorGroup.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.update_group(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1beta1/{group.name=projects/*/groups/*}" % client.transport._host,
+ args[1],
+ )
+
+
+def test_update_group_rest_flattened_error(transport: str = "rest"):
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_group(
+ error_group_service.UpdateGroupRequest(),
+ group=common.ErrorGroup(name="name_value"),
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.ErrorGroupServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.ErrorGroupServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ErrorGroupServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.ErrorGroupServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = ErrorGroupServiceClient(
+ client_options=options,
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a credential.
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = ErrorGroupServiceClient(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.ErrorGroupServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ErrorGroupServiceClient(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ErrorGroupServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = ErrorGroupServiceClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ErrorGroupServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.ErrorGroupServiceGrpcAsyncIOTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ErrorGroupServiceGrpcTransport,
+ transports.ErrorGroupServiceGrpcAsyncIOTransport,
+ transports.ErrorGroupServiceRestTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_kind_grpc():
+ transport = ErrorGroupServiceClient.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_group_empty_call_grpc():
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_group), "__call__") as call:
+ call.return_value = common.ErrorGroup()
+ client.get_group(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_group_service.GetGroupRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_group_empty_call_grpc():
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_group), "__call__") as call:
+ call.return_value = common.ErrorGroup()
+ client.update_group(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_group_service.UpdateGroupRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = ErrorGroupServiceAsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = ErrorGroupServiceAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_group_empty_call_grpc_asyncio():
+ client = ErrorGroupServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_group), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ common.ErrorGroup(
+ name="name_value",
+ group_id="group_id_value",
+ resolution_status=common.ResolutionStatus.OPEN,
+ )
+ )
+ await client.get_group(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_group_service.GetGroupRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_group_empty_call_grpc_asyncio():
+ client = ErrorGroupServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_group), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ common.ErrorGroup(
+ name="name_value",
+ group_id="group_id_value",
+ resolution_status=common.ResolutionStatus.OPEN,
+ )
+ )
+ await client.update_group(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_group_service.UpdateGroupRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_rest():
+ transport = ErrorGroupServiceClient.get_transport_class("rest")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "rest"
+
+
+def test_get_group_rest_bad_request(request_type=error_group_service.GetGroupRequest):
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"group_name": "projects/sample1/groups/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.get_group(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ error_group_service.GetGroupRequest,
+ dict,
+ ],
+)
+def test_get_group_rest_call_success(request_type):
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"group_name": "projects/sample1/groups/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = common.ErrorGroup(
+ name="name_value",
+ group_id="group_id_value",
+ resolution_status=common.ResolutionStatus.OPEN,
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = common.ErrorGroup.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.get_group(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, common.ErrorGroup)
+ assert response.name == "name_value"
+ assert response.group_id == "group_id_value"
+ assert response.resolution_status == common.ResolutionStatus.OPEN
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_group_rest_interceptors(null_interceptor):
+ transport = transports.ErrorGroupServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.ErrorGroupServiceRestInterceptor(),
+ )
+ client = ErrorGroupServiceClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.ErrorGroupServiceRestInterceptor, "post_get_group"
+ ) as post, mock.patch.object(
+ transports.ErrorGroupServiceRestInterceptor, "post_get_group_with_metadata"
+ ) as post_with_metadata, mock.patch.object(
+ transports.ErrorGroupServiceRestInterceptor, "pre_get_group"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = error_group_service.GetGroupRequest.pb(
+ error_group_service.GetGroupRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = common.ErrorGroup.to_json(common.ErrorGroup())
+ req.return_value.content = return_value
+
+ request = error_group_service.GetGroupRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = common.ErrorGroup()
+ post_with_metadata.return_value = common.ErrorGroup(), metadata
+
+ client.get_group(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_group_rest_bad_request(
+ request_type=error_group_service.UpdateGroupRequest,
+):
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"group": {"name": "projects/sample1/groups/sample2"}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.update_group(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ error_group_service.UpdateGroupRequest,
+ dict,
+ ],
+)
+def test_update_group_rest_call_success(request_type):
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"group": {"name": "projects/sample1/groups/sample2"}}
+ request_init["group"] = {
+ "name": "projects/sample1/groups/sample2",
+ "group_id": "group_id_value",
+ "tracking_issues": [{"url": "url_value"}],
+ "resolution_status": 1,
+ }
+ # The version of a generated dependency at test runtime may differ from the version used during generation.
+ # Delete any fields which are not present in the current runtime dependency
+ # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+ # Determine if the message type is proto-plus or protobuf
+ test_field = error_group_service.UpdateGroupRequest.meta.fields["group"]
+
+ def get_message_fields(field):
+ # Given a field which is a message (composite type), return a list with
+ # all the fields of the message.
+ # If the field is not a composite type, return an empty list.
+ message_fields = []
+
+ if hasattr(field, "message") and field.message:
+ is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+ if is_field_type_proto_plus_type:
+ message_fields = field.message.meta.fields.values()
+ # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+ else: # pragma: NO COVER
+ message_fields = field.message.DESCRIPTOR.fields
+ return message_fields
+
+ runtime_nested_fields = [
+ (field.name, nested_field.name)
+ for field in get_message_fields(test_field)
+ for nested_field in get_message_fields(field)
+ ]
+
+ subfields_not_in_runtime = []
+
+ # For each item in the sample request, create a list of sub fields which are not present at runtime
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for field, value in request_init["group"].items(): # pragma: NO COVER
+ result = None
+ is_repeated = False
+ # For repeated fields
+ if isinstance(value, list) and len(value):
+ is_repeated = True
+ result = value[0]
+ # For fields where the type is another message
+ if isinstance(value, dict):
+ result = value
+
+ if result and hasattr(result, "keys"):
+ for subfield in result.keys():
+ if (field, subfield) not in runtime_nested_fields:
+ subfields_not_in_runtime.append(
+ {
+ "field": field,
+ "subfield": subfield,
+ "is_repeated": is_repeated,
+ }
+ )
- response = client.update_group(request)
+ # Remove fields from the sample request which are not present in the runtime version of the dependency
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+ field = subfield_to_delete.get("field")
+ field_repeated = subfield_to_delete.get("is_repeated")
+ subfield = subfield_to_delete.get("subfield")
+ if subfield:
+ if field_repeated:
+ for i in range(0, len(request_init["group"][field])):
+ del request_init["group"][field][i][subfield]
+ else:
+ del request_init["group"][field][subfield]
+ request = request_type(**request_init)
- expected_params = [("$alt", "json;enum-encoding=int")]
- actual_params = req.call_args.kwargs["params"]
- assert expected_params == actual_params
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = common.ErrorGroup(
+ name="name_value",
+ group_id="group_id_value",
+ resolution_status=common.ResolutionStatus.OPEN,
+ )
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
-def test_update_group_rest_unset_required_fields():
- transport = transports.ErrorGroupServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials
- )
+ # Convert return value to protobuf type
+ return_value = common.ErrorGroup.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.update_group(request)
- unset_fields = transport.update_group._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("group",)))
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, common.ErrorGroup)
+ assert response.name == "name_value"
+ assert response.group_id == "group_id_value"
+ assert response.resolution_status == common.ResolutionStatus.OPEN
@pytest.mark.parametrize("null_interceptor", [True, False])
@@ -2443,6 +2659,7 @@ def test_update_group_rest_interceptors(null_interceptor):
else transports.ErrorGroupServiceRestInterceptor(),
)
client = ErrorGroupServiceClient(transport=transport)
+
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
@@ -2450,10 +2667,13 @@ def test_update_group_rest_interceptors(null_interceptor):
) as transcode, mock.patch.object(
transports.ErrorGroupServiceRestInterceptor, "post_update_group"
) as post, mock.patch.object(
+ transports.ErrorGroupServiceRestInterceptor, "post_update_group_with_metadata"
+ ) as post_with_metadata, mock.patch.object(
transports.ErrorGroupServiceRestInterceptor, "pre_update_group"
) as pre:
pre.assert_not_called()
post.assert_not_called()
+ post_with_metadata.assert_not_called()
pb_message = error_group_service.UpdateGroupRequest.pb(
error_group_service.UpdateGroupRequest()
)
@@ -2464,10 +2684,11 @@ def test_update_group_rest_interceptors(null_interceptor):
"query_params": pb_message,
}
- req.return_value = Response()
+ req.return_value = mock.Mock()
req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = common.ErrorGroup.to_json(common.ErrorGroup())
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = common.ErrorGroup.to_json(common.ErrorGroup())
+ req.return_value.content = return_value
request = error_group_service.UpdateGroupRequest()
metadata = [
@@ -2476,6 +2697,7 @@ def test_update_group_rest_interceptors(null_interceptor):
]
pre.return_value = request, metadata
post.return_value = common.ErrorGroup()
+ post_with_metadata.return_value = common.ErrorGroup(), metadata
client.update_group(
request,
@@ -2487,198 +2709,54 @@ def test_update_group_rest_interceptors(null_interceptor):
pre.assert_called_once()
post.assert_called_once()
+ post_with_metadata.assert_called_once()
-def test_update_group_rest_bad_request(
- transport: str = "rest", request_type=error_group_service.UpdateGroupRequest
-):
+def test_initialize_client_w_rest():
client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
-
- # send a request that will satisfy transcoding
- request_init = {"group": {"name": "projects/sample1/groups/sample2"}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.update_group(request)
+ assert client is not None
-def test_update_group_rest_flattened():
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_group_empty_call_rest():
client = ErrorGroupServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport="rest",
)
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = common.ErrorGroup()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {"group": {"name": "projects/sample1/groups/sample2"}}
-
- # get truthy value for each flattened field
- mock_args = dict(
- group=common.ErrorGroup(name="name_value"),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = common.ErrorGroup.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
-
- client.update_group(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate(
- "%s/v1beta1/{group.name=projects/*/groups/*}" % client.transport._host,
- args[1],
- )
-
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_group), "__call__") as call:
+ client.get_group(request=None)
-def test_update_group_rest_flattened_error(transport: str = "rest"):
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_group_service.GetGroupRequest()
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_group(
- error_group_service.UpdateGroupRequest(),
- group=common.ErrorGroup(name="name_value"),
- )
+ assert args[0] == request_msg
-def test_update_group_rest_error():
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_group_empty_call_rest():
client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.ErrorGroupServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.ErrorGroupServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ErrorGroupServiceClient(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
-
- # It is an error to provide an api_key and a transport instance.
- transport = transports.ErrorGroupServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ErrorGroupServiceClient(
- client_options=options,
- transport=transport,
- )
-
- # It is an error to provide an api_key and a credential.
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ErrorGroupServiceClient(
- client_options=options, credentials=ga_credentials.AnonymousCredentials()
- )
-
- # It is an error to provide scopes and a transport instance.
- transport = transports.ErrorGroupServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ErrorGroupServiceClient(
- client_options={"scopes": ["1", "2"]},
- transport=transport,
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ErrorGroupServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- client = ErrorGroupServiceClient(transport=transport)
- assert client.transport is transport
-
-
-def test_transport_get_channel():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ErrorGroupServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- channel = transport.grpc_channel
- assert channel
-
- transport = transports.ErrorGroupServiceGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- channel = transport.grpc_channel
- assert channel
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.ErrorGroupServiceGrpcTransport,
- transports.ErrorGroupServiceGrpcAsyncIOTransport,
- transports.ErrorGroupServiceRestTransport,
- ],
-)
-def test_transport_adc(transport_class):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class()
- adc.assert_called_once()
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_group), "__call__") as call:
+ client.update_group(request=None)
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_group_service.UpdateGroupRequest()
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- "rest",
- ],
-)
-def test_transport_kind(transport_name):
- transport = ErrorGroupServiceClient.get_transport_class(transport_name)(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert transport.kind == transport_name
+ assert args[0] == request_msg
def test_transport_grpc_default():
@@ -3259,36 +3337,41 @@ def test_client_with_default_client_info():
prep.assert_called_once_with(client_info)
+def test_transport_close_grpc():
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
@pytest.mark.asyncio
-async def test_transport_close_async():
+async def test_transport_close_grpc_asyncio():
client = ErrorGroupServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
)
with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
+ type(getattr(client.transport, "_grpc_channel")), "close"
) as close:
async with client:
close.assert_not_called()
close.assert_called_once()
-def test_transport_close():
- transports = {
- "rest": "_session",
- "grpc": "_grpc_channel",
- }
-
- for transport, close_name in transports.items():
- client = ErrorGroupServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+def test_transport_close_rest():
+ client = ErrorGroupServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_session")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py
index 9241fa39..0b1bdc25 100644
--- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py
+++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
import grpc
from grpc.experimental import aio
-from collections.abc import Iterable
+from collections.abc import Iterable, AsyncIterable
from google.protobuf import json_format
import json
import math
@@ -37,6 +37,13 @@
from requests.sessions import Session
from google.protobuf import json_format
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
@@ -62,10 +69,32 @@
import google.auth
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -315,83 +344,46 @@ def test__get_universe_domain():
@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
+ "error_code,cred_info_json,show_cred_info",
[
- (ErrorStatsServiceClient, transports.ErrorStatsServiceGrpcTransport, "grpc"),
- (ErrorStatsServiceClient, transports.ErrorStatsServiceRestTransport, "rest"),
+ (401, CRED_INFO_JSON, True),
+ (403, CRED_INFO_JSON, True),
+ (404, CRED_INFO_JSON, True),
+ (500, CRED_INFO_JSON, False),
+ (401, None, False),
+ (403, None, False),
+ (404, None, False),
+ (500, None, False),
],
)
-def test__validate_universe_domain(client_class, transport_class, transport_name):
- client = client_class(
- transport=transport_class(credentials=ga_credentials.AnonymousCredentials())
- )
- assert client._validate_universe_domain() == True
-
- # Test the case when universe is already validated.
- assert client._validate_universe_domain() == True
-
- if transport_name == "grpc":
- # Test the case where credentials are provided by the
- # `local_channel_credentials`. The default universes in both match.
- channel = grpc.secure_channel(
- "http://localhost/", grpc.local_channel_credentials()
- )
- client = client_class(transport=transport_class(channel=channel))
- assert client._validate_universe_domain() == True
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+ cred = mock.Mock(["get_cred_info"])
+ cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+ client = ErrorStatsServiceClient(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ if show_cred_info:
+ assert error.details == ["foo", CRED_INFO_STRING]
+ else:
+ assert error.details == ["foo"]
- # Test the case where credentials do not exist: e.g. a transport is provided
- # with no credentials. Validation should still succeed because there is no
- # mismatch with non-existent credentials.
- channel = grpc.secure_channel(
- "http://localhost/", grpc.local_channel_credentials()
- )
- transport = transport_class(channel=channel)
- transport._credentials = None
- client = client_class(transport=transport)
- assert client._validate_universe_domain() == True
- # TODO: This is needed to cater for older versions of google-auth
- # Make this test unconditional once the minimum supported version of
- # google-auth becomes 2.23.0 or higher.
- google_auth_major, google_auth_minor = [
- int(part) for part in google.auth.__version__.split(".")[0:2]
- ]
- if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
- credentials = ga_credentials.AnonymousCredentials()
- credentials._universe_domain = "foo.com"
- # Test the case when there is a universe mismatch from the credentials.
- client = client_class(transport=transport_class(credentials=credentials))
- with pytest.raises(ValueError) as excinfo:
- client._validate_universe_domain()
- assert (
- str(excinfo.value)
- == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
- )
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+ cred = mock.Mock([])
+ assert not hasattr(cred, "get_cred_info")
+ client = ErrorStatsServiceClient(credentials=cred)
+ client._transport._credentials = cred
- # Test the case when there is a universe mismatch from the client.
- #
- # TODO: Make this test unconditional once the minimum supported version of
- # google-api-core becomes 2.15.0 or higher.
- api_core_major, api_core_minor = [
- int(part) for part in api_core_version.__version__.split(".")[0:2]
- ]
- if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
- client = client_class(
- client_options={"universe_domain": "bar.com"},
- transport=transport_class(
- credentials=ga_credentials.AnonymousCredentials(),
- ),
- )
- with pytest.raises(ValueError) as excinfo:
- client._validate_universe_domain()
- assert (
- str(excinfo.value)
- == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
- )
+ error = core_exceptions.GoogleAPICallError("message", details=[])
+ error.code = error_code
- # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
- with pytest.raises(ValueError):
- client._compare_universes("foo.bar", None)
+ client._add_cred_info_for_auth_errors(error)
+ assert error.details == []
@pytest.mark.parametrize(
@@ -1201,25 +1193,6 @@ def test_list_group_stats(request_type, transport: str = "grpc"):
assert response.next_page_token == "next_page_token_value"
-def test_list_group_stats_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.list_group_stats()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_stats_service.ListGroupStatsRequest()
-
-
def test_list_group_stats_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1287,29 +1260,6 @@ def test_list_group_stats_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_list_group_stats_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- error_stats_service.ListGroupStatsResponse(
- next_page_token="next_page_token_value",
- )
- )
- response = await client.list_group_stats()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_stats_service.ListGroupStatsRequest()
-
-
@pytest.mark.asyncio
async def test_list_group_stats_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1318,7 +1268,7 @@ async def test_list_group_stats_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1358,7 +1308,7 @@ async def test_list_group_stats_async(
request_type=error_stats_service.ListGroupStatsRequest,
):
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1424,7 +1374,7 @@ def test_list_group_stats_field_headers():
@pytest.mark.asyncio
async def test_list_group_stats_field_headers_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1505,7 +1455,7 @@ def test_list_group_stats_flattened_error():
@pytest.mark.asyncio
async def test_list_group_stats_flattened_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1542,7 +1492,7 @@ async def test_list_group_stats_flattened_async():
@pytest.mark.asyncio
async def test_list_group_stats_flattened_error_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1655,7 +1605,7 @@ def test_list_group_stats_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_group_stats_async_pager():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1707,7 +1657,7 @@ async def test_list_group_stats_async_pager():
@pytest.mark.asyncio
async def test_list_group_stats_async_pages():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1789,25 +1739,6 @@ def test_list_events(request_type, transport: str = "grpc"):
assert response.next_page_token == "next_page_token_value"
-def test_list_events_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_events), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.list_events()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_stats_service.ListEventsRequest()
-
-
def test_list_events_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1875,29 +1806,6 @@ def test_list_events_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_list_events_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_events), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- error_stats_service.ListEventsResponse(
- next_page_token="next_page_token_value",
- )
- )
- response = await client.list_events()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_stats_service.ListEventsRequest()
-
-
@pytest.mark.asyncio
async def test_list_events_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1906,7 +1814,7 @@ async def test_list_events_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1945,7 +1853,7 @@ async def test_list_events_async(
transport: str = "grpc_asyncio", request_type=error_stats_service.ListEventsRequest
):
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2011,7 +1919,7 @@ def test_list_events_field_headers():
@pytest.mark.asyncio
async def test_list_events_field_headers_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2086,7 +1994,7 @@ def test_list_events_flattened_error():
@pytest.mark.asyncio
async def test_list_events_flattened_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2119,7 +2027,7 @@ async def test_list_events_flattened_async():
@pytest.mark.asyncio
async def test_list_events_flattened_error_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2230,7 +2138,7 @@ def test_list_events_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_events_async_pager():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2280,7 +2188,7 @@ async def test_list_events_async_pager():
@pytest.mark.asyncio
async def test_list_events_async_pages():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2359,25 +2267,6 @@ def test_delete_events(request_type, transport: str = "grpc"):
assert isinstance(response, error_stats_service.DeleteEventsResponse)
-def test_delete_events_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_events), "__call__") as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.delete_events()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_stats_service.DeleteEventsRequest()
-
-
def test_delete_events_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -2441,27 +2330,6 @@ def test_delete_events_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_delete_events_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_events), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- error_stats_service.DeleteEventsResponse()
- )
- response = await client.delete_events()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == error_stats_service.DeleteEventsRequest()
-
-
@pytest.mark.asyncio
async def test_delete_events_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -2470,7 +2338,7 @@ async def test_delete_events_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2510,7 +2378,7 @@ async def test_delete_events_async(
request_type=error_stats_service.DeleteEventsRequest,
):
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2573,7 +2441,7 @@ def test_delete_events_field_headers():
@pytest.mark.asyncio
async def test_delete_events_field_headers_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2643,7 +2511,7 @@ def test_delete_events_flattened_error():
@pytest.mark.asyncio
async def test_delete_events_flattened_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2672,7 +2540,7 @@ async def test_delete_events_flattened_async():
@pytest.mark.asyncio
async def test_delete_events_flattened_error_async():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2684,46 +2552,6 @@ async def test_delete_events_flattened_error_async():
)
-@pytest.mark.parametrize(
- "request_type",
- [
- error_stats_service.ListGroupStatsRequest,
- dict,
- ],
-)
-def test_list_group_stats_rest(request_type):
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = error_stats_service.ListGroupStatsResponse(
- next_page_token="next_page_token_value",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = error_stats_service.ListGroupStatsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.list_group_stats(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListGroupStatsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
def test_list_group_stats_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -2842,6 +2670,7 @@ def test_list_group_stats_rest_required_fields(
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
response = client.list_group_stats(request)
@@ -2874,120 +2703,40 @@ def test_list_group_stats_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_group_stats_rest_interceptors(null_interceptor):
- transport = transports.ErrorStatsServiceRestTransport(
+def test_list_group_stats_rest_flattened():
+ client = ErrorStatsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.ErrorStatsServiceRestInterceptor(),
+ transport="rest",
)
- client = ErrorStatsServiceClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.ErrorStatsServiceRestInterceptor, "post_list_group_stats"
- ) as post, mock.patch.object(
- transports.ErrorStatsServiceRestInterceptor, "pre_list_group_stats"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = error_stats_service.ListGroupStatsRequest.pb(
- error_stats_service.ListGroupStatsRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = error_stats_service.ListGroupStatsResponse.to_json(
- error_stats_service.ListGroupStatsResponse()
- )
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = error_stats_service.ListGroupStatsResponse()
- request = error_stats_service.ListGroupStatsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = error_stats_service.ListGroupStatsResponse()
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"project_name": "projects/sample1"}
- client.list_group_stats(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
+ # get truthy value for each flattened field
+ mock_args = dict(
+ project_name="project_name_value",
+ time_range=error_stats_service.QueryTimeRange(
+ period=error_stats_service.QueryTimeRange.Period.PERIOD_1_HOUR
+ ),
)
+ mock_args.update(sample_request)
- pre.assert_called_once()
- post.assert_called_once()
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = error_stats_service.ListGroupStatsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-def test_list_group_stats_rest_bad_request(
- transport: str = "rest", request_type=error_stats_service.ListGroupStatsRequest
-):
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.list_group_stats(request)
-
-
-def test_list_group_stats_rest_flattened():
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = error_stats_service.ListGroupStatsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {"project_name": "projects/sample1"}
-
- # get truthy value for each flattened field
- mock_args = dict(
- project_name="project_name_value",
- time_range=error_stats_service.QueryTimeRange(
- period=error_stats_service.QueryTimeRange.Period.PERIOD_1_HOUR
- ),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = error_stats_service.ListGroupStatsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
-
- client.list_group_stats(**mock_args)
+ client.list_group_stats(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
@@ -3080,46 +2829,6 @@ def test_list_group_stats_rest_pager(transport: str = "rest"):
assert page_.raw_page.next_page_token == token
-@pytest.mark.parametrize(
- "request_type",
- [
- error_stats_service.ListEventsRequest,
- dict,
- ],
-)
-def test_list_events_rest(request_type):
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = error_stats_service.ListEventsResponse(
- next_page_token="next_page_token_value",
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = error_stats_service.ListEventsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.list_events(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListEventsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
def test_list_events_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -3239,6 +2948,7 @@ def test_list_events_rest_required_fields(
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
response = client.list_events(request)
@@ -3278,87 +2988,6 @@ def test_list_events_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_events_rest_interceptors(null_interceptor):
- transport = transports.ErrorStatsServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.ErrorStatsServiceRestInterceptor(),
- )
- client = ErrorStatsServiceClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.ErrorStatsServiceRestInterceptor, "post_list_events"
- ) as post, mock.patch.object(
- transports.ErrorStatsServiceRestInterceptor, "pre_list_events"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = error_stats_service.ListEventsRequest.pb(
- error_stats_service.ListEventsRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = error_stats_service.ListEventsResponse.to_json(
- error_stats_service.ListEventsResponse()
- )
-
- request = error_stats_service.ListEventsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = error_stats_service.ListEventsResponse()
-
- client.list_events(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_list_events_rest_bad_request(
- transport: str = "rest", request_type=error_stats_service.ListEventsRequest
-):
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.list_events(request)
-
-
def test_list_events_rest_flattened():
client = ErrorStatsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
@@ -3388,6 +3017,7 @@ def test_list_events_rest_flattened():
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
client.list_events(**mock_args)
@@ -3480,43 +3110,6 @@ def test_list_events_rest_pager(transport: str = "rest"):
assert page_.raw_page.next_page_token == token
-@pytest.mark.parametrize(
- "request_type",
- [
- error_stats_service.DeleteEventsRequest,
- dict,
- ],
-)
-def test_delete_events_rest(request_type):
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = error_stats_service.DeleteEventsResponse()
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = error_stats_service.DeleteEventsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.delete_events(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, error_stats_service.DeleteEventsResponse)
-
-
def test_delete_events_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -3619,6 +3212,7 @@ def test_delete_events_rest_required_fields(
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
response = client.delete_events(request)
@@ -3632,12 +3226,536 @@ def test_delete_events_rest_unset_required_fields():
credentials=ga_credentials.AnonymousCredentials
)
- unset_fields = transport.delete_events._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("projectName",)))
+ unset_fields = transport.delete_events._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("projectName",)))
+
+
+def test_delete_events_rest_flattened():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = error_stats_service.DeleteEventsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"project_name": "projects/sample1"}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ project_name="project_name_value",
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = error_stats_service.DeleteEventsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.delete_events(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1beta1/{project_name=projects/*}/events" % client.transport._host,
+ args[1],
+ )
+
+
+def test_delete_events_rest_flattened_error(transport: str = "rest"):
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_events(
+ error_stats_service.DeleteEventsRequest(),
+ project_name="project_name_value",
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.ErrorStatsServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.ErrorStatsServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ErrorStatsServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.ErrorStatsServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = ErrorStatsServiceClient(
+ client_options=options,
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a credential.
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = ErrorStatsServiceClient(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.ErrorStatsServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ErrorStatsServiceClient(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ErrorStatsServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = ErrorStatsServiceClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ErrorStatsServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.ErrorStatsServiceGrpcAsyncIOTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ErrorStatsServiceGrpcTransport,
+ transports.ErrorStatsServiceGrpcAsyncIOTransport,
+ transports.ErrorStatsServiceRestTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_kind_grpc():
+ transport = ErrorStatsServiceClient.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_group_stats_empty_call_grpc():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call:
+ call.return_value = error_stats_service.ListGroupStatsResponse()
+ client.list_group_stats(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.ListGroupStatsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_events_empty_call_grpc():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_events), "__call__") as call:
+ call.return_value = error_stats_service.ListEventsResponse()
+ client.list_events(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.ListEventsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_events_empty_call_grpc():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_events), "__call__") as call:
+ call.return_value = error_stats_service.DeleteEventsResponse()
+ client.delete_events(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.DeleteEventsRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = ErrorStatsServiceAsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = ErrorStatsServiceAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_group_stats_empty_call_grpc_asyncio():
+ client = ErrorStatsServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ error_stats_service.ListGroupStatsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_group_stats(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.ListGroupStatsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_events_empty_call_grpc_asyncio():
+ client = ErrorStatsServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_events), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ error_stats_service.ListEventsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_events(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.ListEventsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_events_empty_call_grpc_asyncio():
+ client = ErrorStatsServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_events), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ error_stats_service.DeleteEventsResponse()
+ )
+ await client.delete_events(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.DeleteEventsRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_rest():
+ transport = ErrorStatsServiceClient.get_transport_class("rest")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "rest"
+
+
+def test_list_group_stats_rest_bad_request(
+ request_type=error_stats_service.ListGroupStatsRequest,
+):
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.list_group_stats(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ error_stats_service.ListGroupStatsRequest,
+ dict,
+ ],
+)
+def test_list_group_stats_rest_call_success(request_type):
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = error_stats_service.ListGroupStatsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = error_stats_service.ListGroupStatsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.list_group_stats(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListGroupStatsPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_group_stats_rest_interceptors(null_interceptor):
+ transport = transports.ErrorStatsServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.ErrorStatsServiceRestInterceptor(),
+ )
+ client = ErrorStatsServiceClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor, "post_list_group_stats"
+ ) as post, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor,
+ "post_list_group_stats_with_metadata",
+ ) as post_with_metadata, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor, "pre_list_group_stats"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = error_stats_service.ListGroupStatsRequest.pb(
+ error_stats_service.ListGroupStatsRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = error_stats_service.ListGroupStatsResponse.to_json(
+ error_stats_service.ListGroupStatsResponse()
+ )
+ req.return_value.content = return_value
+
+ request = error_stats_service.ListGroupStatsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = error_stats_service.ListGroupStatsResponse()
+ post_with_metadata.return_value = (
+ error_stats_service.ListGroupStatsResponse(),
+ metadata,
+ )
+
+ client.list_group_stats(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_list_events_rest_bad_request(
+ request_type=error_stats_service.ListEventsRequest,
+):
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.list_events(request)
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ error_stats_service.ListEventsRequest,
+ dict,
+ ],
+)
+def test_list_events_rest_call_success(request_type):
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = error_stats_service.ListEventsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = error_stats_service.ListEventsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.list_events(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEventsPager)
+ assert response.next_page_token == "next_page_token_value"
@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_events_rest_interceptors(null_interceptor):
+def test_list_events_rest_interceptors(null_interceptor):
transport = transports.ErrorStatsServiceRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
@@ -3645,19 +3763,23 @@ def test_delete_events_rest_interceptors(null_interceptor):
else transports.ErrorStatsServiceRestInterceptor(),
)
client = ErrorStatsServiceClient(transport=transport)
+
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
- transports.ErrorStatsServiceRestInterceptor, "post_delete_events"
+ transports.ErrorStatsServiceRestInterceptor, "post_list_events"
) as post, mock.patch.object(
- transports.ErrorStatsServiceRestInterceptor, "pre_delete_events"
+ transports.ErrorStatsServiceRestInterceptor, "post_list_events_with_metadata"
+ ) as post_with_metadata, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor, "pre_list_events"
) as pre:
pre.assert_not_called()
post.assert_not_called()
- pb_message = error_stats_service.DeleteEventsRequest.pb(
- error_stats_service.DeleteEventsRequest()
+ post_with_metadata.assert_not_called()
+ pb_message = error_stats_service.ListEventsRequest.pb(
+ error_stats_service.ListEventsRequest()
)
transcode.return_value = {
"method": "post",
@@ -3666,22 +3788,27 @@ def test_delete_events_rest_interceptors(null_interceptor):
"query_params": pb_message,
}
- req.return_value = Response()
+ req.return_value = mock.Mock()
req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = error_stats_service.DeleteEventsResponse.to_json(
- error_stats_service.DeleteEventsResponse()
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = error_stats_service.ListEventsResponse.to_json(
+ error_stats_service.ListEventsResponse()
)
+ req.return_value.content = return_value
- request = error_stats_service.DeleteEventsRequest()
+ request = error_stats_service.ListEventsRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
- post.return_value = error_stats_service.DeleteEventsResponse()
+ post.return_value = error_stats_service.ListEventsResponse()
+ post_with_metadata.return_value = (
+ error_stats_service.ListEventsResponse(),
+ metadata,
+ )
- client.delete_events(
+ client.list_events(
request,
metadata=[
("key", "val"),
@@ -3691,16 +3818,15 @@ def test_delete_events_rest_interceptors(null_interceptor):
pre.assert_called_once()
post.assert_called_once()
+ post_with_metadata.assert_called_once()
def test_delete_events_rest_bad_request(
- transport: str = "rest", request_type=error_stats_service.DeleteEventsRequest
+ request_type=error_stats_service.DeleteEventsRequest,
):
client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
-
# send a request that will satisfy transcoding
request_init = {"project_name": "projects/sample1"}
request = request_type(**request_init)
@@ -3710,179 +3836,185 @@ def test_delete_events_rest_bad_request(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
- response_value = Response()
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
response_value.status_code = 400
- response_value.request = Request()
+ response_value.request = mock.Mock()
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
client.delete_events(request)
-def test_delete_events_rest_flattened():
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ error_stats_service.DeleteEventsRequest,
+ dict,
+ ],
+)
+def test_delete_events_rest_call_success(request_type):
client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request = request_type(**request_init)
+
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = error_stats_service.DeleteEventsResponse()
- # get arguments that satisfy an http rule for this method
- sample_request = {"project_name": "projects/sample1"}
-
- # get truthy value for each flattened field
- mock_args = dict(
- project_name="project_name_value",
- )
- mock_args.update(sample_request)
-
# Wrap the value into a proper Response obj
- response_value = Response()
+ response_value = mock.Mock()
response_value.status_code = 200
+
# Convert return value to protobuf type
return_value = error_stats_service.DeleteEventsResponse.pb(return_value)
json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode("UTF-8")
+ response_value.content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.delete_events(request)
- client.delete_events(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate(
- "%s/v1beta1/{project_name=projects/*}/events" % client.transport._host,
- args[1],
- )
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, error_stats_service.DeleteEventsResponse)
-def test_delete_events_rest_flattened_error(transport: str = "rest"):
- client = ErrorStatsServiceClient(
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_events_rest_interceptors(null_interceptor):
+ transport = transports.ErrorStatsServiceRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ interceptor=None
+ if null_interceptor
+ else transports.ErrorStatsServiceRestInterceptor(),
)
+ client = ErrorStatsServiceClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor, "post_delete_events"
+ ) as post, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor, "post_delete_events_with_metadata"
+ ) as post_with_metadata, mock.patch.object(
+ transports.ErrorStatsServiceRestInterceptor, "pre_delete_events"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = error_stats_service.DeleteEventsRequest.pb(
+ error_stats_service.DeleteEventsRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = error_stats_service.DeleteEventsResponse.to_json(
+ error_stats_service.DeleteEventsResponse()
+ )
+ req.return_value.content = return_value
+
+ request = error_stats_service.DeleteEventsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = error_stats_service.DeleteEventsResponse()
+ post_with_metadata.return_value = (
+ error_stats_service.DeleteEventsResponse(),
+ metadata,
+ )
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
client.delete_events(
- error_stats_service.DeleteEventsRequest(),
- project_name="project_name_value",
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
)
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
-def test_delete_events_rest_error():
+def test_initialize_client_w_rest():
client = ErrorStatsServiceClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
+ assert client is not None
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.ErrorStatsServiceGrpcTransport(
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_group_stats_empty_call_rest():
+ client = ErrorStatsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- with pytest.raises(ValueError):
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.ErrorStatsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ErrorStatsServiceClient(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call:
+ client.list_group_stats(request=None)
- # It is an error to provide an api_key and a transport instance.
- transport = transports.ErrorStatsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ErrorStatsServiceClient(
- client_options=options,
- transport=transport,
- )
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.ListGroupStatsRequest()
- # It is an error to provide an api_key and a credential.
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ErrorStatsServiceClient(
- client_options=options, credentials=ga_credentials.AnonymousCredentials()
- )
+ assert args[0] == request_msg
- # It is an error to provide scopes and a transport instance.
- transport = transports.ErrorStatsServiceGrpcTransport(
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_events_empty_call_rest():
+ client = ErrorStatsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- with pytest.raises(ValueError):
- client = ErrorStatsServiceClient(
- client_options={"scopes": ["1", "2"]},
- transport=transport,
- )
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_events), "__call__") as call:
+ client.list_events(request=None)
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ErrorStatsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- client = ErrorStatsServiceClient(transport=transport)
- assert client.transport is transport
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.ListEventsRequest()
+ assert args[0] == request_msg
-def test_transport_get_channel():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ErrorStatsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- channel = transport.grpc_channel
- assert channel
- transport = transports.ErrorStatsServiceGrpcAsyncIOTransport(
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_events_empty_call_rest():
+ client = ErrorStatsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- channel = transport.grpc_channel
- assert channel
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.ErrorStatsServiceGrpcTransport,
- transports.ErrorStatsServiceGrpcAsyncIOTransport,
- transports.ErrorStatsServiceRestTransport,
- ],
-)
-def test_transport_adc(transport_class):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class()
- adc.assert_called_once()
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_events), "__call__") as call:
+ client.delete_events(request=None)
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = error_stats_service.DeleteEventsRequest()
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- "rest",
- ],
-)
-def test_transport_kind(transport_name):
- transport = ErrorStatsServiceClient.get_transport_class(transport_name)(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert transport.kind == transport_name
+ assert args[0] == request_msg
def test_transport_grpc_default():
@@ -4467,36 +4599,41 @@ def test_client_with_default_client_info():
prep.assert_called_once_with(client_info)
+def test_transport_close_grpc():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
@pytest.mark.asyncio
-async def test_transport_close_async():
+async def test_transport_close_grpc_asyncio():
client = ErrorStatsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
)
with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
+ type(getattr(client.transport, "_grpc_channel")), "close"
) as close:
async with client:
close.assert_not_called()
close.assert_called_once()
-def test_transport_close():
- transports = {
- "rest": "_session",
- "grpc": "_grpc_channel",
- }
-
- for transport, close_name in transports.items():
- client = ErrorStatsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+def test_transport_close_rest():
+ client = ErrorStatsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_session")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py
index ca1f397f..9ac7c004 100644
--- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py
+++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
import grpc
from grpc.experimental import aio
-from collections.abc import Iterable
+from collections.abc import Iterable, AsyncIterable
from google.protobuf import json_format
import json
import math
@@ -37,6 +37,13 @@
from requests.sessions import Session
from google.protobuf import json_format
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
@@ -62,10 +69,32 @@
import google.auth
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -321,91 +350,46 @@ def test__get_universe_domain():
@pytest.mark.parametrize(
- "client_class,transport_class,transport_name",
+ "error_code,cred_info_json,show_cred_info",
[
- (
- ReportErrorsServiceClient,
- transports.ReportErrorsServiceGrpcTransport,
- "grpc",
- ),
- (
- ReportErrorsServiceClient,
- transports.ReportErrorsServiceRestTransport,
- "rest",
- ),
+ (401, CRED_INFO_JSON, True),
+ (403, CRED_INFO_JSON, True),
+ (404, CRED_INFO_JSON, True),
+ (500, CRED_INFO_JSON, False),
+ (401, None, False),
+ (403, None, False),
+ (404, None, False),
+ (500, None, False),
],
)
-def test__validate_universe_domain(client_class, transport_class, transport_name):
- client = client_class(
- transport=transport_class(credentials=ga_credentials.AnonymousCredentials())
- )
- assert client._validate_universe_domain() == True
-
- # Test the case when universe is already validated.
- assert client._validate_universe_domain() == True
-
- if transport_name == "grpc":
- # Test the case where credentials are provided by the
- # `local_channel_credentials`. The default universes in both match.
- channel = grpc.secure_channel(
- "http://localhost/", grpc.local_channel_credentials()
- )
- client = client_class(transport=transport_class(channel=channel))
- assert client._validate_universe_domain() == True
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+ cred = mock.Mock(["get_cred_info"])
+ cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+ client = ReportErrorsServiceClient(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ if show_cred_info:
+ assert error.details == ["foo", CRED_INFO_STRING]
+ else:
+ assert error.details == ["foo"]
- # Test the case where credentials do not exist: e.g. a transport is provided
- # with no credentials. Validation should still succeed because there is no
- # mismatch with non-existent credentials.
- channel = grpc.secure_channel(
- "http://localhost/", grpc.local_channel_credentials()
- )
- transport = transport_class(channel=channel)
- transport._credentials = None
- client = client_class(transport=transport)
- assert client._validate_universe_domain() == True
- # TODO: This is needed to cater for older versions of google-auth
- # Make this test unconditional once the minimum supported version of
- # google-auth becomes 2.23.0 or higher.
- google_auth_major, google_auth_minor = [
- int(part) for part in google.auth.__version__.split(".")[0:2]
- ]
- if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
- credentials = ga_credentials.AnonymousCredentials()
- credentials._universe_domain = "foo.com"
- # Test the case when there is a universe mismatch from the credentials.
- client = client_class(transport=transport_class(credentials=credentials))
- with pytest.raises(ValueError) as excinfo:
- client._validate_universe_domain()
- assert (
- str(excinfo.value)
- == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
- )
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+ cred = mock.Mock([])
+ assert not hasattr(cred, "get_cred_info")
+ client = ReportErrorsServiceClient(credentials=cred)
+ client._transport._credentials = cred
- # Test the case when there is a universe mismatch from the client.
- #
- # TODO: Make this test unconditional once the minimum supported version of
- # google-api-core becomes 2.15.0 or higher.
- api_core_major, api_core_minor = [
- int(part) for part in api_core_version.__version__.split(".")[0:2]
- ]
- if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
- client = client_class(
- client_options={"universe_domain": "bar.com"},
- transport=transport_class(
- credentials=ga_credentials.AnonymousCredentials(),
- ),
- )
- with pytest.raises(ValueError) as excinfo:
- client._validate_universe_domain()
- assert (
- str(excinfo.value)
- == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
- )
+ error = core_exceptions.GoogleAPICallError("message", details=[])
+ error.code = error_code
- # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
- with pytest.raises(ValueError):
- client._compare_universes("foo.bar", None)
+ client._add_cred_info_for_auth_errors(error)
+ assert error.details == []
@pytest.mark.parametrize(
@@ -1230,27 +1214,6 @@ def test_report_error_event(request_type, transport: str = "grpc"):
assert isinstance(response, report_errors_service.ReportErrorEventResponse)
-def test_report_error_event_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ReportErrorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.report_error_event), "__call__"
- ) as call:
- call.return_value.name = (
- "foo" # operation_request.operation in compute client(s) expect a string.
- )
- client.report_error_event()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == report_errors_service.ReportErrorEventRequest()
-
-
def test_report_error_event_non_empty_request_with_auto_populated_field():
# This test is a coverage failsafe to make sure that UUID4 fields are
# automatically populated, according to AIP-4235, with non-empty requests.
@@ -1320,29 +1283,6 @@ def test_report_error_event_use_cached_wrapped_rpc():
assert mock_rpc.call_count == 2
-@pytest.mark.asyncio
-async def test_report_error_event_empty_call_async():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.report_error_event), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- report_errors_service.ReportErrorEventResponse()
- )
- response = await client.report_error_event()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == report_errors_service.ReportErrorEventRequest()
-
-
@pytest.mark.asyncio
async def test_report_error_event_async_use_cached_wrapped_rpc(
transport: str = "grpc_asyncio",
@@ -1351,7 +1291,7 @@ async def test_report_error_event_async_use_cached_wrapped_rpc(
# instead of constructing them on each call
with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1391,7 +1331,7 @@ async def test_report_error_event_async(
request_type=report_errors_service.ReportErrorEventRequest,
):
client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1458,7 +1398,7 @@ def test_report_error_event_field_headers():
@pytest.mark.asyncio
async def test_report_error_event_field_headers_async():
client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1543,7 +1483,7 @@ def test_report_error_event_flattened_error():
@pytest.mark.asyncio
async def test_report_error_event_flattened_async():
client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1582,7 +1522,7 @@ async def test_report_error_event_flattened_async():
@pytest.mark.asyncio
async def test_report_error_event_flattened_error_async():
client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1597,135 +1537,6 @@ async def test_report_error_event_flattened_error_async():
)
-@pytest.mark.parametrize(
- "request_type",
- [
- report_errors_service.ReportErrorEventRequest,
- dict,
- ],
-)
-def test_report_error_event_rest(request_type):
- client = ReportErrorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request_init["event"] = {
- "event_time": {"seconds": 751, "nanos": 543},
- "service_context": {
- "service": "service_value",
- "version": "version_value",
- "resource_type": "resource_type_value",
- },
- "message": "message_value",
- "context": {
- "http_request": {
- "method": "method_value",
- "url": "url_value",
- "user_agent": "user_agent_value",
- "referrer": "referrer_value",
- "response_status_code": 2156,
- "remote_ip": "remote_ip_value",
- },
- "user": "user_value",
- "report_location": {
- "file_path": "file_path_value",
- "line_number": 1168,
- "function_name": "function_name_value",
- },
- },
- }
- # The version of a generated dependency at test runtime may differ from the version used during generation.
- # Delete any fields which are not present in the current runtime dependency
- # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
- # Determine if the message type is proto-plus or protobuf
- test_field = report_errors_service.ReportErrorEventRequest.meta.fields["event"]
-
- def get_message_fields(field):
- # Given a field which is a message (composite type), return a list with
- # all the fields of the message.
- # If the field is not a composite type, return an empty list.
- message_fields = []
-
- if hasattr(field, "message") and field.message:
- is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
- if is_field_type_proto_plus_type:
- message_fields = field.message.meta.fields.values()
- # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
- else: # pragma: NO COVER
- message_fields = field.message.DESCRIPTOR.fields
- return message_fields
-
- runtime_nested_fields = [
- (field.name, nested_field.name)
- for field in get_message_fields(test_field)
- for nested_field in get_message_fields(field)
- ]
-
- subfields_not_in_runtime = []
-
- # For each item in the sample request, create a list of sub fields which are not present at runtime
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for field, value in request_init["event"].items(): # pragma: NO COVER
- result = None
- is_repeated = False
- # For repeated fields
- if isinstance(value, list) and len(value):
- is_repeated = True
- result = value[0]
- # For fields where the type is another message
- if isinstance(value, dict):
- result = value
-
- if result and hasattr(result, "keys"):
- for subfield in result.keys():
- if (field, subfield) not in runtime_nested_fields:
- subfields_not_in_runtime.append(
- {
- "field": field,
- "subfield": subfield,
- "is_repeated": is_repeated,
- }
- )
-
- # Remove fields from the sample request which are not present in the runtime version of the dependency
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
- field = subfield_to_delete.get("field")
- field_repeated = subfield_to_delete.get("is_repeated")
- subfield = subfield_to_delete.get("subfield")
- if subfield:
- if field_repeated:
- for i in range(0, len(request_init["event"][field])):
- del request_init["event"][field][i][subfield]
- else:
- del request_init["event"][field][subfield]
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = report_errors_service.ReportErrorEventResponse()
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = report_errors_service.ReportErrorEventResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode("UTF-8")
- req.return_value = response_value
- response = client.report_error_event(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, report_errors_service.ReportErrorEventResponse)
-
-
def test_report_error_event_rest_use_cached_wrapped_rpc():
# Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
# instead of constructing them on each call
@@ -1835,6 +1646,7 @@ def test_report_error_event_rest_required_fields(
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
response = client.report_error_event(request)
@@ -1860,102 +1672,19 @@ def test_report_error_event_rest_unset_required_fields():
)
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_report_error_event_rest_interceptors(null_interceptor):
- transport = transports.ReportErrorsServiceRestTransport(
+def test_report_error_event_rest_flattened():
+ client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None
- if null_interceptor
- else transports.ReportErrorsServiceRestInterceptor(),
+ transport="rest",
)
- client = ReportErrorsServiceClient(transport=transport)
- with mock.patch.object(
- type(client.transport._session), "request"
- ) as req, mock.patch.object(
- path_template, "transcode"
- ) as transcode, mock.patch.object(
- transports.ReportErrorsServiceRestInterceptor, "post_report_error_event"
- ) as post, mock.patch.object(
- transports.ReportErrorsServiceRestInterceptor, "pre_report_error_event"
- ) as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = report_errors_service.ReportErrorEventRequest.pb(
- report_errors_service.ReportErrorEventRequest()
- )
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = (
- report_errors_service.ReportErrorEventResponse.to_json(
- report_errors_service.ReportErrorEventResponse()
- )
- )
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = report_errors_service.ReportErrorEventResponse()
- request = report_errors_service.ReportErrorEventRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = report_errors_service.ReportErrorEventResponse()
-
- client.report_error_event(
- request,
- metadata=[
- ("key", "val"),
- ("cephalopod", "squid"),
- ],
- )
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_report_error_event_rest_bad_request(
- transport: str = "rest", request_type=report_errors_service.ReportErrorEventRequest
-):
- client = ReportErrorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {"project_name": "projects/sample1"}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, "request") as req, pytest.raises(
- core_exceptions.BadRequest
- ):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.report_error_event(request)
-
-
-def test_report_error_event_rest_flattened():
- client = ReportErrorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), "request") as req:
- # Designate an appropriate value for the returned response.
- return_value = report_errors_service.ReportErrorEventResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {"project_name": "projects/sample1"}
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"project_name": "projects/sample1"}
# get truthy value for each flattened field
mock_args = dict(
@@ -1974,6 +1703,7 @@ def test_report_error_event_rest_flattened():
json_return_value = json_format.MessageToJson(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
client.report_error_event(**mock_args)
@@ -2006,12 +1736,6 @@ def test_report_error_event_rest_flattened_error(transport: str = "rest"):
)
-def test_report_error_event_rest_error():
- client = ReportErrorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport="rest"
- )
-
-
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
@@ -2104,18 +1828,340 @@ def test_transport_adc(transport_class):
adc.assert_called_once()
+def test_transport_kind_grpc():
+ transport = ReportErrorsServiceClient.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_report_error_event_empty_call_grpc():
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.report_error_event), "__call__"
+ ) as call:
+ call.return_value = report_errors_service.ReportErrorEventResponse()
+ client.report_error_event(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = report_errors_service.ReportErrorEventRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = ReportErrorsServiceAsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = ReportErrorsServiceAsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_report_error_event_empty_call_grpc_asyncio():
+ client = ReportErrorsServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.report_error_event), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ report_errors_service.ReportErrorEventResponse()
+ )
+ await client.report_error_event(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = report_errors_service.ReportErrorEventRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_rest():
+ transport = ReportErrorsServiceClient.get_transport_class("rest")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "rest"
+
+
+def test_report_error_event_rest_bad_request(
+ request_type=report_errors_service.ReportErrorEventRequest,
+):
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ""
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.report_error_event(request)
+
+
@pytest.mark.parametrize(
- "transport_name",
+ "request_type",
[
- "grpc",
- "rest",
+ report_errors_service.ReportErrorEventRequest,
+ dict,
],
)
-def test_transport_kind(transport_name):
- transport = ReportErrorsServiceClient.get_transport_class(transport_name)(
+def test_report_error_event_rest_call_success(request_type):
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"project_name": "projects/sample1"}
+ request_init["event"] = {
+ "event_time": {"seconds": 751, "nanos": 543},
+ "service_context": {
+ "service": "service_value",
+ "version": "version_value",
+ "resource_type": "resource_type_value",
+ },
+ "message": "message_value",
+ "context": {
+ "http_request": {
+ "method": "method_value",
+ "url": "url_value",
+ "user_agent": "user_agent_value",
+ "referrer": "referrer_value",
+ "response_status_code": 2156,
+ "remote_ip": "remote_ip_value",
+ },
+ "user": "user_value",
+ "report_location": {
+ "file_path": "file_path_value",
+ "line_number": 1168,
+ "function_name": "function_name_value",
+ },
+ },
+ }
+ # The version of a generated dependency at test runtime may differ from the version used during generation.
+ # Delete any fields which are not present in the current runtime dependency
+ # See https://github.com/googleapis/gapic-generator-python/issues/1748
+
+ # Determine if the message type is proto-plus or protobuf
+ test_field = report_errors_service.ReportErrorEventRequest.meta.fields["event"]
+
+ def get_message_fields(field):
+ # Given a field which is a message (composite type), return a list with
+ # all the fields of the message.
+ # If the field is not a composite type, return an empty list.
+ message_fields = []
+
+ if hasattr(field, "message") and field.message:
+ is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
+
+ if is_field_type_proto_plus_type:
+ message_fields = field.message.meta.fields.values()
+ # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
+ else: # pragma: NO COVER
+ message_fields = field.message.DESCRIPTOR.fields
+ return message_fields
+
+ runtime_nested_fields = [
+ (field.name, nested_field.name)
+ for field in get_message_fields(test_field)
+ for nested_field in get_message_fields(field)
+ ]
+
+ subfields_not_in_runtime = []
+
+ # For each item in the sample request, create a list of sub fields which are not present at runtime
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for field, value in request_init["event"].items(): # pragma: NO COVER
+ result = None
+ is_repeated = False
+ # For repeated fields
+ if isinstance(value, list) and len(value):
+ is_repeated = True
+ result = value[0]
+ # For fields where the type is another message
+ if isinstance(value, dict):
+ result = value
+
+ if result and hasattr(result, "keys"):
+ for subfield in result.keys():
+ if (field, subfield) not in runtime_nested_fields:
+ subfields_not_in_runtime.append(
+ {
+ "field": field,
+ "subfield": subfield,
+ "is_repeated": is_repeated,
+ }
+ )
+
+ # Remove fields from the sample request which are not present in the runtime version of the dependency
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
+ for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
+ field = subfield_to_delete.get("field")
+ field_repeated = subfield_to_delete.get("is_repeated")
+ subfield = subfield_to_delete.get("subfield")
+ if subfield:
+ if field_repeated:
+ for i in range(0, len(request_init["event"][field])):
+ del request_init["event"][field][i][subfield]
+ else:
+ del request_init["event"][field][subfield]
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = report_errors_service.ReportErrorEventResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = report_errors_service.ReportErrorEventResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.report_error_event(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, report_errors_service.ReportErrorEventResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_report_error_event_rest_interceptors(null_interceptor):
+ transport = transports.ReportErrorsServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.ReportErrorsServiceRestInterceptor(),
+ )
+ client = ReportErrorsServiceClient(transport=transport)
+
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.ReportErrorsServiceRestInterceptor, "post_report_error_event"
+ ) as post, mock.patch.object(
+ transports.ReportErrorsServiceRestInterceptor,
+ "post_report_error_event_with_metadata",
+ ) as post_with_metadata, mock.patch.object(
+ transports.ReportErrorsServiceRestInterceptor, "pre_report_error_event"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = report_errors_service.ReportErrorEventRequest.pb(
+ report_errors_service.ReportErrorEventRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = report_errors_service.ReportErrorEventResponse.to_json(
+ report_errors_service.ReportErrorEventResponse()
+ )
+ req.return_value.content = return_value
+
+ request = report_errors_service.ReportErrorEventRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = report_errors_service.ReportErrorEventResponse()
+ post_with_metadata.return_value = (
+ report_errors_service.ReportErrorEventResponse(),
+ metadata,
+ )
+
+ client.report_error_event(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_initialize_client_w_rest():
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_report_error_event_empty_call_rest():
+ client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
)
- assert transport.kind == transport_name
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.report_error_event), "__call__"
+ ) as call:
+ client.report_error_event(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = report_errors_service.ReportErrorEventRequest()
+
+ assert args[0] == request_msg
def test_transport_grpc_default():
@@ -2667,36 +2713,41 @@ def test_client_with_default_client_info():
prep.assert_called_once_with(client_info)
+def test_transport_close_grpc():
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
@pytest.mark.asyncio
-async def test_transport_close_async():
+async def test_transport_close_grpc_asyncio():
client = ReportErrorsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
)
with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
+ type(getattr(client.transport, "_grpc_channel")), "close"
) as close:
async with client:
close.assert_not_called()
close.assert_called_once()
-def test_transport_close():
- transports = {
- "rest": "_session",
- "grpc": "_grpc_channel",
- }
-
- for transport, close_name in transports.items():
- client = ReportErrorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+def test_transport_close_rest():
+ client = ReportErrorsServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_session")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():