diff --git a/.flake8 b/.flake8 index 89954f8bd..90316de21 100644 --- a/.flake8 +++ b/.flake8 @@ -1,31 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues ignore = E203, E231, E266, E501, W503 exclude = - # Exclude environment test code. - tests/environment/** - - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a3da1b0d4..6f1eaeb91 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:ecf409a43d8b157fb83c403de4d83e3da7d88e423044410c0e2434bf776221d1 +# created: 2025-04-10T16:21:41.67162455Z diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2a3b42055..0738e11ee 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-logging +# @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners -# @googleapis/python-samples-reviewers @googleapis/api-logging are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging +# @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index a9d3f44e3..d5f69b10a 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -1,4 +1,20 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. 
assign_issues: - - googleapis/api-logging-reviewers + - googleapis/api-logging + - googleapis/api-logging-partners + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/api-logging + - googleapis/api-logging-partners + assign_prs: - - googleapis/api-logging-reviewers + - googleapis/api-logging + - googleapis/api-logging-partners diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml index d4ca94189..d47d146a9 100644 --- a/.github/release-trigger.yml +++ b/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: python-logging diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 37438d33d..439a0bcb7 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -12,3 +12,7 @@ branchProtectionRules: - 'Samples - Lint' - 'Samples - Python 3.7' - 'Samples - Python 3.8' + - 'Samples - Python 3.9' + - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' diff --git a/.gitignore b/.gitignore index b4243ced7..d083ea1dd 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/.kokoro/build.sh b/.kokoro/build.sh index afa7a81aa..d41b45aa1 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-logging" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 @@ -28,17 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") - -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -53,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index 8e39a2cc4..000000000 --- a/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from ubuntu:22.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - python3-distutils \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - -###################### Install python 3.9.13 - -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz - -# Extract files -RUN tar -xvf Python-3.9.13.tgz - -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations -RUN make altinstall - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3 -m pip - -CMD ["python3.8"] diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg deleted file mode 100644 index 36e4a6540..000000000 --- a/.kokoro/docs/common.cfg +++ /dev/null @@ -1,85 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-logging/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 3d5288bef..000000000 --- a/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. 
-env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d9..000000000 --- a/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index 6f3972140..c435402f4 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh deleted file mode 100755 index 9eafe0be3..000000000 --- a/.kokoro/publish-docs.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version - -# build docs -nox -s docs - -# create metadata -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index 9bdfbceb5..000000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-logging/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") -cd github/python-logging -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index 4dc3167a5..000000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,69 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-logging/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-logging/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" - } - } -} - -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. -action { - define_artifacts { - regex: "github/python-logging/**/*.tar.gz" - strip_prefix: "github/python-logging" - } -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "logging" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d9..000000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index ec867d9fd..000000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,10 +0,0 @@ -gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 diff --git a/.kokoro/requirements.txt 
b/.kokoro/requirements.txt deleted file mode 100644 index 029bd342d..000000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,496 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e - # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c - # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db - # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 - # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e 
\ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - 
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - 
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 - # via - # gcp-docuploader - # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - 
--hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de - # via - # gcp-releasetool - # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e - # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - 
--hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc - # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 - # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf - # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 - # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a - # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - 
--hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - 
--hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - 
--hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f - # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c - # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - 
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 - # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 - # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 - # via - # gcp-releasetool - # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - 
--hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - 
--hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 - # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab - # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 - # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 - # via - # gcp-releasetool - # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c - # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 - # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - 
--hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 - # via google-auth -pycparser==2.21 
\ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 - # via - # readme-renderer - # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 - # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 - # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 - # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 - # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - 
--hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # bleach - # gcp-docuploader - # google-auth - # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 - # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e - # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 - # via - # requests - # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 - # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ 
- --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 - # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f - # via -r requirements.in diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 000000000..fb8ce8795 --- /dev/null +++ b/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,59 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000..7e2973e3b --- /dev/null +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file 
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 000000000..4eb8ee8be --- /dev/null +++ b/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,60 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline_v2.sh" + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000..7e2973e3b --- /dev/null +++ b/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file 
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 63ac41dfa..e9d8bd79a 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 5a0f5fab6..53e365bc4 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 50b35a48c..7933d8201 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index d85b1f267..48f796997 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 59a7cf3a9..35fa52923 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 19409cbd3..1d74695f7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 23efc1eaa..d235af2ce 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.6.0" + ".": "3.12.1" } diff --git a/.repo-metadata.json b/.repo-metadata.json index 0b6c0d8ca..83c212332 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -10,7 +10,7 @@ "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com", - "codeowner_team": "@googleapis/api-logging", + "codeowner_team": "@googleapis/api-logging @googleapis/api-logging-partners", "default_version": "v2", "api_shortname": "logging", "api_description": "allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud and Amazon Web Services. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premises systems, and hybrid cloud systems. 
BindPlane is included with your Google Cloud project at no additional cost." diff --git a/.trampolinerc b/.trampolinerc index 65248f703..636e35c32 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/CHANGELOG.md b/CHANGELOG.md index 16e128b18..1f98b01a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,163 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.12.1](https://github.com/googleapis/python-logging/compare/v3.12.0...v3.12.1) (2025-04-21) + + +### Bug Fixes + +* Make logging handler close conditional to having the transport opened ([#990](https://github.com/googleapis/python-logging/issues/990)) ([66c6b91](https://github.com/googleapis/python-logging/commit/66c6b91725eb479a0af138a2be13f3c25f369d7e)) + +## [3.12.0](https://github.com/googleapis/python-logging/compare/v3.11.4...v3.12.0) (2025-04-10) + + +### Features + +* Add REST Interceptors which support reading metadata ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77)) +* Add support for opt-in debug logging ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77)) +* Added flushes/close functionality to logging handlers ([#917](https://github.com/googleapis/python-logging/issues/917)) ([d179304](https://github.com/googleapis/python-logging/commit/d179304b344277e349456f72cd90c56f28011286)) + + +### Bug Fixes + +* Allow protobuf 6.x ([#977](https://github.com/googleapis/python-logging/issues/977)) ([6757890](https://github.com/googleapis/python-logging/commit/675789001344fdae68ee20ec14e14c11c83a0433)) +* **deps:** Require google-cloud-audit-log >= 0.3.1 ([#979](https://github.com/googleapis/python-logging/issues/979)) 
([1cc00ec](https://github.com/googleapis/python-logging/commit/1cc00ecf646a7a36eb32afd2e5df3d9aa7f564b1)) +* Fix typing issue with gRPC metadata when key ends in -bin ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77)) + + +### Documentation + +* Added documentation on log_level and excluded_loggers params in setup_logging ([#971](https://github.com/googleapis/python-logging/issues/971)) ([70d9d25](https://github.com/googleapis/python-logging/commit/70d9d25bf8c3c85a3c5523ecc7fbdbf72f08c583)) +* Update README to break infinite redirect loop ([#972](https://github.com/googleapis/python-logging/issues/972)) ([52cd907](https://github.com/googleapis/python-logging/commit/52cd907bb313df2766ec11e3d24c7e10cda31ca7)) + +## [3.11.4](https://github.com/googleapis/python-logging/compare/v3.11.3...v3.11.4) (2025-01-22) + + +### Bug Fixes + +* Made `write_entries` raise `ValueError` on `ParseError`s ([#958](https://github.com/googleapis/python-logging/issues/958)) ([5309478](https://github.com/googleapis/python-logging/commit/5309478c054d0f2b9301817fd835f2098f51dc3a)) +* Require proto-plus >= 1.25 for Python 3.13 ([#955](https://github.com/googleapis/python-logging/issues/955)) ([7baed8e](https://github.com/googleapis/python-logging/commit/7baed8e968f0bfa6abdbf0715dc43822f2fba8ba)) +* Require proto-plus >= 1.25 for Python 3.13 ([#955](https://github.com/googleapis/python-logging/issues/955)) ([002b1fc](https://github.com/googleapis/python-logging/commit/002b1fcb395d77d94d7216560c30015b9aefca81)) + +## [3.11.3](https://github.com/googleapis/python-logging/compare/v3.11.2...v3.11.3) (2024-10-15) + + +### Bug Fixes + +* 16-bit hexadecimal formatting for XCTC span IDs ([#946](https://github.com/googleapis/python-logging/issues/946)) ([1f2b190](https://github.com/googleapis/python-logging/commit/1f2b190c0d1a7125d9412c157915d0011cdd4c47)) + +## [3.11.2](https://github.com/googleapis/python-logging/compare/v3.11.1...v3.11.2) 
(2024-08-15) + + +### Bug Fixes + +* **deps:** Require google-cloud-appengine-logging>=0.1.3 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374)) +* **deps:** Require google-cloud-audit-log >= 0.2.4 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374)) +* **deps:** Require opentelemetry-api>=1.9.0 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374)) +* Fixed type hinting issue with specifying Transport class ([#930](https://github.com/googleapis/python-logging/issues/930)) ([e2875d6](https://github.com/googleapis/python-logging/commit/e2875d664c153a4328bd42790dfb7b4ac36a9048)) + +## [3.11.1](https://github.com/googleapis/python-logging/compare/v3.11.0...v3.11.1) (2024-08-06) + + +### Bug Fixes + +* Allow protobuf 5.x ([#888](https://github.com/googleapis/python-logging/issues/888)) ([7746e64](https://github.com/googleapis/python-logging/commit/7746e643af29b1008d6e6d6a9958c8337c958dd4)) + +## [3.11.0](https://github.com/googleapis/python-logging/compare/v3.10.0...v3.11.0) (2024-07-15) + + +### Features + +* OpenTelemetry trace/spanID integration for Python handlers ([#889](https://github.com/googleapis/python-logging/issues/889)) ([78168a3](https://github.com/googleapis/python-logging/commit/78168a38577b698130a861af4e4d229f42660330)) + + +### Bug Fixes + +* Added environment specific labels to client library when running in Cloud Run Jobs ([#877](https://github.com/googleapis/python-logging/issues/877)) ([9c5e8f0](https://github.com/googleapis/python-logging/commit/9c5e8f0548f88235fe6474469bc37685e2498dd1)) +* Added missing import into logger.py ([#896](https://github.com/googleapis/python-logging/issues/896)) ([9ca242d](https://github.com/googleapis/python-logging/commit/9ca242d10f9f3bca120b292f478d62f5fa1d3c06)) +* Added type hints to CloudLoggingHandler constructor 
([#903](https://github.com/googleapis/python-logging/issues/903)) ([6959345](https://github.com/googleapis/python-logging/commit/69593459614be968f7a0136aa76701c4fc408834)) + + +### Documentation + +* Add summary_overview template ([#878](https://github.com/googleapis/python-logging/issues/878)) ([b60714c](https://github.com/googleapis/python-logging/commit/b60714cb1cc3aac79c86225f8f9cbd24d8ab170f)) +* Changed table in web-framework-integration to bulleted list ([#875](https://github.com/googleapis/python-logging/issues/875)) ([a4aa3a7](https://github.com/googleapis/python-logging/commit/a4aa3a7cf1e3bb32ec2772084a7dc6c16e1454ff)) +* Documentation update for OpenTelemetry ([#915](https://github.com/googleapis/python-logging/issues/915)) ([2a0539a](https://github.com/googleapis/python-logging/commit/2a0539a30e6dcf45c0970e3aacfd4a2772877526)) +* Update `dictConfig` snippet ([#885](https://github.com/googleapis/python-logging/issues/885)) ([6264107](https://github.com/googleapis/python-logging/commit/62641075042a3da9bb9c059d963bad14a1586b1c)) + +## [3.10.0](https://github.com/googleapis/python-logging/compare/v3.9.0...v3.10.0) (2024-03-13) + + +### Features + +* Allow users to explicitly configure universe domain ([#846](https://github.com/googleapis/python-logging/issues/846)) ([e998a21](https://github.com/googleapis/python-logging/commit/e998a219740cf8b2373e462867244a6860b0c88c)) + + +### Bug Fixes + +* Added placeholder kwargs to StructuredLogHandler ([#845](https://github.com/googleapis/python-logging/issues/845)) ([9bc0a37](https://github.com/googleapis/python-logging/commit/9bc0a37d910340d828db8bab33d67785f184f00c)) +* Allowed for a partial override of loggers that get excluded from setup_client ([#831](https://github.com/googleapis/python-logging/issues/831)) ([870c940](https://github.com/googleapis/python-logging/commit/870c9403e03d31a0f22dddc257cd5fb2b4fc5ee3)) +* Remove usage in including_default_value_fields to prepare for protobuf 5.x 
([#866](https://github.com/googleapis/python-logging/issues/866)) ([66a534d](https://github.com/googleapis/python-logging/commit/66a534d1b83d7c63f5c7b013bf27ed54dd2786c3)) +* Use value of cluster-location in GKE for tagging location ([#830](https://github.com/googleapis/python-logging/issues/830)) ([c15847c](https://github.com/googleapis/python-logging/commit/c15847c215c18ad3970efba12f5d337e6d499883)) + + +### Documentation + +* Added documentation for Django/Flask integrations and dictConfig ([#848](https://github.com/googleapis/python-logging/issues/848)) ([c65ec92](https://github.com/googleapis/python-logging/commit/c65ec92bf348e2bcdd8f4c5bacc152cfb4737eb1)) + +## [3.9.0](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0) (2023-12-08) + + +### Features + +* Add support for Python 3.12 ([#813](https://github.com/googleapis/python-logging/issues/813)) ([6591b53](https://github.com/googleapis/python-logging/commit/6591b53e3fcd67e156765f329700443647b70349)) +* Use native namespaces instead of pkg_resources ([#812](https://github.com/googleapis/python-logging/issues/812)) ([10ad75d](https://github.com/googleapis/python-logging/commit/10ad75d2b9276df389f5069f9f143f8f4621d04d)) + + +### Bug Fixes + +* Fixed DeprecationWarning for datetime objects for Python 3.12 ([#824](https://github.com/googleapis/python-logging/issues/824)) ([2384981](https://github.com/googleapis/python-logging/commit/2384981c9137a57a647a69a32b67dcacd619ea0a)) +* Fixed object paths in autogenerated code in owlbot.py ([#804](https://github.com/googleapis/python-logging/issues/804)) ([b14bb14](https://github.com/googleapis/python-logging/commit/b14bb144fad2dcf067b7e62e402b708f45ebadbe)) +* Updated protobuf JSON formatting to support nested protobufs ([#797](https://github.com/googleapis/python-logging/issues/797)) ([a00c261](https://github.com/googleapis/python-logging/commit/a00c261ee07a5dcaac9f5b966b4bb6729a2bbe65)) +* Use `retry_async` instead of `retry` in async client 
([#816](https://github.com/googleapis/python-logging/issues/816)) ([c79f7f5](https://github.com/googleapis/python-logging/commit/c79f7f55dddb170eac29f24b23bfe1dde8bfbda8)) +* Use warning instead of warn in system tests to avoid DeprecationWarning ([#821](https://github.com/googleapis/python-logging/issues/821)) ([c447175](https://github.com/googleapis/python-logging/commit/c4471758e1efee0e3599b08969449b2ce71bd1b4)) + +## [3.8.0](https://github.com/googleapis/python-logging/compare/v3.7.0...v3.8.0) (2023-10-03) + + +### Features + +* Add cloud_run_job monitored resource type. ([#788](https://github.com/googleapis/python-logging/issues/788)) ([3b310d6](https://github.com/googleapis/python-logging/commit/3b310d68b68df5bb31e21ac30b23207ef50c3f6f)) + +## [3.7.0](https://github.com/googleapis/python-logging/compare/v3.6.0...v3.7.0) (2023-09-25) + + +### Features + +* Add ConfigServiceV2.CreateBucketAsync method for creating Log Buckets asynchronously ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.CreateLink method for creating linked datasets for Log Analytics Buckets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.DeleteLink method for deleting linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.GetLink methods for describing linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.ListLinks method for listing linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add ConfigServiceV2.UpdateBucketAsync method for creating Log Buckets asynchronously ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add 
LogBucket.analytics_enabled field that specifies whether Log Bucket's Analytics features are enabled ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add LogBucket.index_configs field that contains a list of Log Bucket's indexed fields and related configuration data ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Log Analytics features of the Cloud Logging API ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) + + +### Bug Fixes + +* Add async context manager return types ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Add severity to structured log write ([#783](https://github.com/googleapis/python-logging/issues/783)) ([31a7f69](https://github.com/googleapis/python-logging/commit/31a7f69ed94719546136a3bf1b3ecdb28e369414)) +* Handle exceptions raised when fetching Django request data ([#758](https://github.com/googleapis/python-logging/issues/758)) ([5ecf886](https://github.com/googleapis/python-logging/commit/5ecf88606b4f29b00ff8b18ae71c151d203d5c3b)) +* Unintended exception omittion ([#736](https://github.com/googleapis/python-logging/issues/736)) ([022dc54](https://github.com/googleapis/python-logging/commit/022dc545f781648043296b3ca04d835fcb6f1d7e)) + + +### Documentation + +* Documentation for the Log Analytics features of the Cloud Logging API ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) +* Minor formatting ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93)) + ## [3.6.0](https://github.com/googleapis/python-logging/compare/v3.5.0...v3.6.0) (2023-07-05) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 6fa7a4dac..7bbacd5ca 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add 
a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.8 -- -k + $ nox -s system-3.12 -- -k .. note:: - System tests are only configured to run under Python 3.8. + System tests are only configured to run under Python 3.12. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local @@ -226,12 +226,16 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/MANIFEST.in b/MANIFEST.in index e0a667053..d6814cd60 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/README.rst b/README.rst index 2618dc37a..d9549ed7d 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ Logging configuration. .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg :target: https://pypi.org/project/google-cloud-logging/ .. _Cloud Logging API: https://cloud.google.com/logging -.. _Client Library Documentation: https://googleapis.dev/python/logging/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/logging/latest/summary_overview .. _Product Documentation: https://cloud.google.com/logging/docs .. _Setting Up Cloud Logging for Python: https://cloud.google.com/logging/docs/setup/python .. _Python's standard logging library: https://docs.python.org/2/library/logging.html @@ -61,8 +61,8 @@ Python >= 3.7 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. The last version of the library compatible with Python 2.7 is `google-cloud-logging==1.15.1`. -Python == 3.6. The last version of the library compatible with Python 3.6 is `google-cloud-logging==3.1.2`. +| Python == 2.7. The last version of the library compatible with Python 2.7 is ``google-cloud-logging==1.15.1``. +| Python == 3.6. The last version of the library compatible with Python 3.6 is ``google-cloud-logging==3.1.2``. Mac/Linux diff --git a/docs/auto-trace-span-extraction.rst b/docs/auto-trace-span-extraction.rst new file mode 100644 index 000000000..1eb21fb78 --- /dev/null +++ b/docs/auto-trace-span-extraction.rst @@ -0,0 +1,27 @@ +Automatic Trace/Span ID Extraction +================================== + +.. note:: + All `LogEntry fields`_ populated :ref:`manually` will override those populated via methods referred to in this + section. + +The Google Cloud Logging library can automatically populate `LogEntry fields`_ +`trace`, `span_id`, and `trace_sampled` via OpenTelemetry integration, or extracting header information from an HTTP request. 
+ +OpenTelemetry Integration +------------------------- + +If you have the OpenTelemetry SDK package installed and are logging from within an active OpenTelemetry span, that log entry will automatically +have the `trace`, `span_id`, and `trace_sampled` fields populated from that span. More information about OpenTelemetry can be found +`here `_. + +HTTP headers +------------ + +Another possible method of automatic `trace` / `span_id` is via extraction from HTTP headers. +This is prioritized after OpenTelemetry and requires a :doc:`supported Python web framework `. +Trace information is automatically populated from either the `W3C Traceparent `_ +or `X-Cloud-Trace-Context `_ headers. +Populating trace information this way also automatically populates the `http_request` field in the `LogEntry` as well. + +.. _LogEntry fields: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry diff --git a/docs/conf.py b/docs/conf.py index fffea8f16..a65cf85ff 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/index.rst b/docs/index.rst index 01d8e4eee..08f049c16 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -41,3 +41,8 @@ For a list of all ``google-cloud-logging`` releases: :maxdepth: 2 changelog + +.. 
toctree:: + :hidden: + + summary_overview.md diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst index a485fce6d..cf00c37ae 100644 --- a/docs/std-lib-integration.rst +++ b/docs/std-lib-integration.rst @@ -16,6 +16,21 @@ call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~ :end-before: [END logging_handler_setup] :dedent: 4 + +You can also set the logging level threshold of the logging handler created by :meth:`~google.cloud.logging_v2.client.Client.setup_logging`, +as well as set loggers excluded from the logger that is created: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_setup_logging] + :end-before: [END logging_setup_logging] + :dedent: 4 + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_setup_logging_excludes] + :end-before: [END logging_setup_logging_excludes] + :dedent: 4 + + This :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configurations for the environment your code is running on. For more information, see the `Google Cloud Logging documentation `_. @@ -44,6 +59,16 @@ There are two supported handler classes to choose from: to standard out, to be read and parsed by a GCP logging agent - This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run +Handler classes can also be specified via `dictConfig `_: + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_dict_config] + :end-before: [END logging_dict_config] + :dedent: 4 + +Note that since :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler` requires an already initialized :class:`~google.cloud.logging_v2.client.Client`, +you must initialize a client and include it in the dictConfig entry for a `CloudLoggingHandler`. 
+ Standard Library --------------------------- @@ -92,32 +117,35 @@ The Google Cloud Logging library attempts to detect and attach additional The following fields are currently supported: - labels -- trace* -- span_id* -- trace_sampled* -- http_request* +- trace +- span_id +- trace_sampled +- http_request - source_location - resource - :ref:`json_fields` .. note:: - Fields marked with "*" require a supported Python web framework. The Google Cloud Logging - library currently supports `flask `_ and `django `_ + | More information about `trace`, `span_id`, and `trace_sampled` can be found :doc:`here `. + | `http_request` requires a :doc:`supported Python web framework `. + Manual Metadata Using the `extra` Argument -------------------------------------------- +.. _Manual-Metadata: + The Python :mod:`logging` standard library accepts `an "extra" argument `_ when writing logs. You can use this argument to populate LogRecord objects with user-defined key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional -metadata to populate `LogEntry fields `_. +metadata to populate `LogEntry fields`_. .. literalinclude:: ../samples/snippets/usage_guide.py :start-after: [START logging_extras] :end-before: [END logging_extras] :dedent: 4 -All of the `LogEntry fields `_ +All of the `LogEntry fields`_ that can be :ref:`autodetected` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra` argument override any :ref:`automatically detected` fields. @@ -144,3 +172,5 @@ You can use both transport options over :doc:`gRPC or HTTP`. .. note:: :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler` prints logs as formatted JSON to standard output, and does not use a Transport class. + +.. 
_LogEntry fields: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry \ No newline at end of file diff --git a/docs/summary_overview.md b/docs/summary_overview.md new file mode 100644 index 000000000..4786fbcaa --- /dev/null +++ b/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Cloud Logging API + +Overview of the APIs available for Cloud Logging API. + +## All entries + +Classes, methods and properties & attributes for +Cloud Logging API. + +[classes](https://cloud.google.com/python/docs/reference/logging/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/logging/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/logging/latest/summary_property.html) diff --git a/docs/usage.rst b/docs/usage.rst index 929ee9cef..c28be0c6f 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -4,6 +4,8 @@ Usage Guide :maxdepth: 2 std-lib-integration + auto-trace-span-extraction + web-framework-integration direct-lib-usage grpc-vs-http diff --git a/docs/web-framework-integration.rst b/docs/web-framework-integration.rst new file mode 100644 index 000000000..d7bc3229d --- /dev/null +++ b/docs/web-framework-integration.rst @@ -0,0 +1,29 @@ +Integration with Python Web Frameworks +====================================== + +The Google Cloud Logging library can integrate with Python web frameworks +`flask `_ and `django `_ to +automatically populate `LogEntry fields `_ +`trace`, `span_id`, `trace_sampled`, and `http_request`. + +Django +------ + +Django integration has been tested to work with each of the Django/Python versions listed `here `_. 
+To enable Django integration, add `google.cloud.logging_v2.handlers.middleware.RequestMiddleware` to the list of `MIDDLEWARE` +in your `settings `_ file. Also be sure to :doc:`set up logging ` in your settings file. + +Flask +----- + +Flask integration has been tested to work with the following versions of Flask: + +- Python 3.7 - 3.9: + + - Flask >=1.0.0 + +- Python >=3.10: + + - Flask >=1.0.3 + +Be sure to :doc:`set up logging ` before declaring the Flask app. diff --git a/google/__init__.py b/google/__init__.py deleted file mode 100644 index 0e1bc5131..000000000 --- a/google/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/google/cloud/__init__.py b/google/cloud/__init__.py deleted file mode 100644 index 0e1bc5131..000000000 --- a/google/cloud/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/google/cloud/logging/gapic_version.py b/google/cloud/logging/gapic_version.py index d29522314..14833215c 100644 --- a/google/cloud/logging/gapic_version.py +++ b/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.6.0" # {x-release-please-version} +__version__ = "3.12.1" # {x-release-please-version} diff --git a/google/cloud/logging_v2/__init__.py b/google/cloud/logging_v2/__init__.py index 9860f1e06..fac0b7d02 100644 --- a/google/cloud/logging_v2/__init__.py +++ b/google/cloud/logging_v2/__init__.py @@ -36,7 +36,7 @@ ASCENDING = "timestamp asc" """Query string to order by ascending timestamps.""" DESCENDING = "timestamp desc" -"""Query string to order by decending timestamps.""" +"""Query string to order by descending timestamps.""" _instrumentation_emitted = False """Flag for whether instrumentation info has been emitted""" diff --git a/google/cloud/logging_v2/_gapic.py b/google/cloud/logging_v2/_gapic.py index f6f6dca1f..379665248 100644 --- a/google/cloud/logging_v2/_gapic.py +++ b/google/cloud/logging_v2/_gapic.py @@ -30,6 +30,7 @@ from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import ParseDict +from google.protobuf.json_format import ParseError from google.cloud.logging_v2._helpers import entry_from_resource 
from google.cloud.logging_v2.sink import Sink @@ -151,7 +152,10 @@ def write_entries( Useful for checking whether the logging API endpoints are working properly before sending valuable data. """ - log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] + try: + log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries] + except ParseError as e: + raise ValueError(f"Invalid log entry: {str(e)}") from e request = WriteLogEntriesRequest( log_name=logger_name, @@ -271,7 +275,6 @@ def sink_create( return MessageToDict( LogSink.pb(created_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def sink_get(self, sink_name): @@ -298,7 +301,6 @@ def sink_get(self, sink_name): return MessageToDict( LogSink.pb(sink_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def sink_update( @@ -333,7 +335,7 @@ def sink_update( dict: The sink resource returned from the API (converted from a protobuf to a dictionary). """ - name = sink_name.split("/")[-1] # parse name out of full resoure name + name = sink_name.split("/")[-1] # parse name out of full resource name sink_pb = LogSink( name=name, filter=filter_, @@ -351,7 +353,6 @@ def sink_update( return MessageToDict( LogSink.pb(sink_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def sink_delete(self, sink_name): @@ -459,7 +460,6 @@ def metric_get(self, project, metric_name): return MessageToDict( LogMetric.pb(metric_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def metric_update( @@ -496,7 +496,6 @@ def metric_update( return MessageToDict( LogMetric.pb(metric_pb), preserving_proto_field_name=False, - including_default_value_fields=False, ) def metric_delete(self, project, metric_name): @@ -530,7 +529,6 @@ def _parse_log_entry(entry_pb): return MessageToDict( entry_pb, preserving_proto_field_name=False, - including_default_value_fields=False, ) except TypeError: if 
entry_pb.HasField("proto_payload"): @@ -539,7 +537,6 @@ def _parse_log_entry(entry_pb): entry_mapping = MessageToDict( entry_pb, preserving_proto_field_name=False, - including_default_value_fields=False, ) entry_mapping["protoPayload"] = proto_payload return entry_mapping diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py index 581dce35e..c629b8d92 100644 --- a/google/cloud/logging_v2/_http.py +++ b/google/cloud/logging_v2/_http.py @@ -26,7 +26,6 @@ class Connection(_http.JSONConnection): - DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" def __init__(self, client, *, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): @@ -348,7 +347,7 @@ def sink_update( dict: The returned (updated) resource. """ target = f"/{sink_name}" - name = sink_name.split("/")[-1] # parse name out of full resoure name + name = sink_name.split("/")[-1] # parse name out of full resource name data = {"name": name, "filter": filter_, "destination": destination} query_params = {"uniqueWriterIdentity": unique_writer_identity} return self.api_request( diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index 94c1e6ca7..f52845ee5 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -149,6 +149,8 @@ def __init__( else: self._use_grpc = _use_grpc + self._handlers = set() + @property def logging_api(self): """Helper for logging-related API calls. @@ -400,7 +402,8 @@ def setup_logging( loggers, will report to Cloud Logging. Args: - log_level (Optional[int]): Python logging log level. Defaults to + log_level (Optional[int]): The logging level threshold of the attached logger, + as set by the :meth:`logging.Logger.setLevel` method. Defaults to :const:`logging.INFO`. excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler to. 
This will always include the @@ -410,4 +413,17 @@ def setup_logging( dict: keyword args passed to handler constructor """ handler = self.get_default_handler(**kw) + self._handlers.add(handler) setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers) + + def flush_handlers(self): + """Flushes all Python log handlers associated with this Client.""" + + for handler in self._handlers: + handler.flush() + + def close(self): + """Closes the Client and all handlers associated with this Client.""" + super(Client, self).close() + for handler in self._handlers: + handler.close() diff --git a/google/cloud/logging_v2/entries.py b/google/cloud/logging_v2/entries.py index 9db020f67..d8a877738 100644 --- a/google/cloud/logging_v2/entries.py +++ b/google/cloud/logging_v2/entries.py @@ -18,9 +18,9 @@ import json import re -from google.protobuf.any_pb2 import Any from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import Parse +from google.protobuf.message import Message from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _name_from_project_path @@ -325,7 +325,7 @@ def _extract_payload(cls, resource): @property def payload_pb(self): - if isinstance(self.payload, Any): + if isinstance(self.payload, Message): return self.payload @property @@ -337,10 +337,10 @@ def to_api_repr(self): """API repr (JSON format) for entry.""" info = super(ProtobufEntry, self).to_api_repr() proto_payload = None - if self.payload_json: - proto_payload = dict(self.payload_json) - elif self.payload_pb: - proto_payload = MessageToDict(self.payload_pb) + if self.payload_pb: + proto_payload = MessageToDict(self.payload) + elif self.payload_json: + proto_payload = dict(self.payload) info["protoPayload"] = proto_payload return info diff --git a/google/cloud/logging_v2/gapic_metadata.json b/google/cloud/logging_v2/gapic_metadata.json index a629e5a50..8d2b1297a 100644 --- a/google/cloud/logging_v2/gapic_metadata.json +++ 
b/google/cloud/logging_v2/gapic_metadata.json @@ -20,11 +20,21 @@ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -45,6 +55,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -70,6 +85,11 @@ "get_exclusion" ] }, + "GetLink": { + "methods": [ + "get_link" + ] + }, "GetSettings": { "methods": [ "get_settings" @@ -95,6 +115,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -115,6 +140,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" @@ -155,11 +185,21 @@ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -180,6 +220,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -205,6 +250,11 @@ "get_exclusion" ] }, + "GetLink": { + "methods": [ + "get_link" + ] + }, "GetSettings": { "methods": [ "get_settings" @@ -230,6 +280,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -250,6 +305,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" diff --git a/google/cloud/logging_v2/gapic_version.py b/google/cloud/logging_v2/gapic_version.py index d29522314..14833215c 100644 --- a/google/cloud/logging_v2/gapic_version.py +++ 
b/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.6.0" # {x-release-please-version} +__version__ = "3.12.1" # {x-release-please-version} diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py index 32e70dfdd..ff5838e05 100644 --- a/google/cloud/logging_v2/handlers/_helpers.py +++ b/google/cloud/logging_v2/handlers/_helpers.py @@ -24,6 +24,8 @@ except ImportError: # pragma: NO COVER flask = None +import opentelemetry.trace + from google.cloud.logging_v2.handlers.middleware.request import _get_django_request _DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH" @@ -66,7 +68,7 @@ def get_request_data_from_flask(): Returns: Tuple[Optional[dict], Optional[str], Optional[str], bool]: Data related to the current http request, trace_id, span_id and trace_sampled - for the request. All fields will be None if a django request isn't found. + for the request. All fields will be None if a Flask request isn't found. """ if flask is None or not flask.request: return None, None, None, False @@ -104,10 +106,17 @@ def get_request_data_from_django(): if request is None: return None, None, None, False + # Django can raise django.core.exceptions.DisallowedHost here for a + # malformed HTTP_HOST header. But we don't want to import Django modules. 
+ try: + request_url = request.build_absolute_uri() + except Exception: + request_url = None + # build http_request http_request = { "requestMethod": request.method, - "requestUrl": request.build_absolute_uri(), + "requestUrl": request_url, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } @@ -165,13 +174,22 @@ def _parse_xcloud_trace(header): Args: header (str): the string extracted from the X_CLOUD_TRACE header Returns: - Tuple[Optional[dict], Optional[str], bool]: + Tuple[Optional[str], Optional[str], bool]: The trace_id, span_id and trace_sampled extracted from the header Each field will be None if not found. """ trace_id = span_id = None trace_sampled = False - # see https://cloud.google.com/trace/docs/setup for X-Cloud-Trace_Context format + + # As per the format described at https://cloud.google.com/trace/docs/trace-context#legacy-http-header + # "X-Cloud-Trace-Context: TRACE_ID[/SPAN_ID][;o=OPTIONS]" + # for example: + # "X-Cloud-Trace-Context: 105445aa7843bc8bf206b12000100000/1;o=1" + # + # We expect: + # * trace_id (optional, 128-bit hex string): "105445aa7843bc8bf206b12000100000" + # * span_id (optional, 16-bit hex string): "0000000000000001" (needs to be converted into 16 bit hex string) + # * trace_sampled (optional, bool): true if header: try: regex = r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?" 
@@ -179,28 +197,87 @@ def _parse_xcloud_trace(header): trace_id = match.group(1) span_id = match.group(3) trace_sampled = match.group(5) == "1" + + # Convert the span ID to 16-bit hexadecimal instead of decimal + try: + span_id_int = int(span_id) + if span_id_int > 0 and span_id_int < 2**64: + span_id = f"{span_id_int:016x}" + else: + span_id = None + except (ValueError, TypeError): + span_id = None + except IndexError: pass return trace_id, span_id, trace_sampled +def _retrieve_current_open_telemetry_span(): + """Helper to retrieve trace, span ID, and trace sampled information from the current + OpenTelemetry span. + + Returns: + Tuple[Optional[str], Optional[str], bool]: + Data related to the current trace_id, span_id, and trace_sampled for the + current OpenTelemetry span. If a span is not found, return None/False for all + fields. + """ + span = opentelemetry.trace.get_current_span() + if span != opentelemetry.trace.span.INVALID_SPAN: + context = span.get_span_context() + trace_id = opentelemetry.trace.format_trace_id(context.trace_id) + span_id = opentelemetry.trace.format_span_id(context.span_id) + trace_sampled = context.trace_flags.sampled + + return trace_id, span_id, trace_sampled + + return None, None, False + + def get_request_data(): """Helper to get http_request and trace data from supported web - frameworks (currently supported: Flask and Django). + frameworks (currently supported: Flask and Django), as well as OpenTelemetry. Attempts + to retrieve trace/spanID from OpenTelemetry first, before going to Traceparent then XCTC. + HTTP request data is taken from a supporting web framework (currently Flask or Django). + Because HTTP request data is decoupled from OpenTelemetry, it is possible to get as a + return value the HTTP request from the web framework of choice, and trace/span data from + OpenTelemetry, even if trace data is present in the HTTP request headers. 
Returns: Tuple[Optional[dict], Optional[str], Optional[str], bool]: Data related to the current http request, trace_id, span_id, and trace_sampled for the request. All fields will be None if a http request isn't found. """ + + ( + otel_trace_id, + otel_span_id, + otel_trace_sampled, + ) = _retrieve_current_open_telemetry_span() + + # Get HTTP request data checkers = ( get_request_data_from_django, get_request_data_from_flask, ) - for checker in checkers: - http_request, trace_id, span_id, trace_sampled = checker() - if http_request is not None: - return http_request, trace_id, span_id, trace_sampled + http_request, http_trace_id, http_span_id, http_trace_sampled = ( + None, + None, + None, + False, + ) - return None, None, None, False + for checker in checkers: + http_request, http_trace_id, http_span_id, http_trace_sampled = checker() + if http_request is None: + http_trace_id, http_span_id, http_trace_sampled = None, None, False + else: + break + + # otel_trace_id existing means the other return values are non-null + if otel_trace_id: + return http_request, otel_trace_id, otel_span_id, otel_trace_sampled + else: + return http_request, http_trace_id, http_span_id, http_trace_sampled diff --git a/google/cloud/logging_v2/handlers/_monitored_resources.py b/google/cloud/logging_v2/handlers/_monitored_resources.py index a5b8dfee3..5240fe746 100644 --- a/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import functools +import logging import os from google.cloud.logging_v2.resource import Resource @@ -26,11 +28,21 @@ _CLOUD_RUN_SERVICE_ID = "K_SERVICE" _CLOUD_RUN_REVISION_ID = "K_REVISION" _CLOUD_RUN_CONFIGURATION_ID = "K_CONFIGURATION" -_CLOUD_RUN_ENV_VARS = [ +_CLOUD_RUN_SERVICE_ENV_VARS = [ _CLOUD_RUN_SERVICE_ID, _CLOUD_RUN_REVISION_ID, _CLOUD_RUN_CONFIGURATION_ID, ] +_CLOUD_RUN_JOB_ID = "CLOUD_RUN_JOB" +_CLOUD_RUN_EXECUTION_ID = "CLOUD_RUN_EXECUTION" +_CLOUD_RUN_TASK_INDEX = "CLOUD_RUN_TASK_INDEX" +_CLOUD_RUN_TASK_ATTEMPT = "CLOUD_RUN_TASK_ATTEMPT" +_CLOUD_RUN_JOB_ENV_VARS = [ + _CLOUD_RUN_JOB_ID, + _CLOUD_RUN_EXECUTION_ID, + _CLOUD_RUN_TASK_INDEX, + _CLOUD_RUN_TASK_ATTEMPT, +] """Environment variables set in Cloud Run environment.""" _FUNCTION_TARGET = "FUNCTION_TARGET" @@ -51,9 +63,26 @@ _GKE_CLUSTER_NAME = "instance/attributes/cluster-name" """Attribute in metadata server when in GKE environment.""" +_GKE_CLUSTER_LOCATION = "instance/attributes/cluster-location" +"""Attribute in metadata server when in GKE environment.""" + _PROJECT_NAME = "project/project-id" """Attribute in metadata server when in GKE environment.""" +_GAE_RESOURCE_TYPE = "gae_app" +"""Resource type for App Engine environment.""" + +_CLOUD_RUN_JOB_RESOURCE_TYPE = "cloud_run_job" +"""Resource type for Cloud Run Jobs.""" + +_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" +"""Extra trace label to be added on App Engine environments""" + +_CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL = "run.googleapis.com/execution_name" +_CLOUD_RUN_JOBS_TASK_INDEX_LABEL = "run.googleapis.com/task_index" +_CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL = "run.googleapis.com/task_attempt" +"""Extra labels for Cloud Run environments to be recognized by Cloud Run Jobs web UI.""" + def _create_functions_resource(): """Create a standardized Cloud Functions resource. 
@@ -84,7 +113,7 @@ def _create_kubernetes_resource(): Returns: google.cloud.logging.Resource """ - zone = retrieve_metadata_server(_ZONE_ID) + location = retrieve_metadata_server(_GKE_CLUSTER_LOCATION) cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME) project = retrieve_metadata_server(_PROJECT_NAME) @@ -92,7 +121,7 @@ def _create_kubernetes_resource(): type="k8s_container", labels={ "project_id": project if project else "", - "location": zone if zone else "", + "location": location if location else "", "cluster_name": cluster_name if cluster_name else "", }, ) @@ -118,8 +147,8 @@ def _create_compute_resource(): return resource -def _create_cloud_run_resource(): - """Create a standardized Cloud Run resource. +def _create_cloud_run_service_resource(): + """Create a standardized Cloud Run service resource. Returns: google.cloud.logging.Resource """ @@ -138,6 +167,24 @@ def _create_cloud_run_resource(): return resource +def _create_cloud_run_job_resource(): + """Create a standardized Cloud Run job resource. + Returns: + google.cloud.logging.Resource + """ + region = retrieve_metadata_server(_REGION_ID) + project = retrieve_metadata_server(_PROJECT_NAME) + resource = Resource( + type=_CLOUD_RUN_JOB_RESOURCE_TYPE, + labels={ + "project_id": project if project else "", + "job_name": os.environ.get(_CLOUD_RUN_JOB_ID, ""), + "location": region.split("/")[-1] if region else "", + }, + ) + return resource + + def _create_app_engine_resource(): """Create a standardized App Engine resource. 
Returns: @@ -146,7 +193,7 @@ def _create_app_engine_resource(): zone = retrieve_metadata_server(_ZONE_ID) project = retrieve_metadata_server(_PROJECT_NAME) resource = Resource( - type="gae_app", + type=_GAE_RESOURCE_TYPE, labels={ "project_id": project if project else "", "module_id": os.environ.get(_GAE_SERVICE_ENV, ""), @@ -190,12 +237,67 @@ def detect_resource(project=""): ): # Cloud Functions return _create_functions_resource() - elif all([env in os.environ for env in _CLOUD_RUN_ENV_VARS]): + elif all([env in os.environ for env in _CLOUD_RUN_SERVICE_ENV_VARS]): + # Cloud Run + return _create_cloud_run_service_resource() + elif all([env in os.environ for env in _CLOUD_RUN_JOB_ENV_VARS]): # Cloud Run - return _create_cloud_run_resource() + return _create_cloud_run_job_resource() elif gce_instance_name is not None: # Compute Engine return _create_compute_resource() else: # use generic global resource return _create_global_resource(project) + + +@functools.lru_cache(maxsize=None) +def _get_environmental_labels(resource_type): + """Builds a dictionary of labels to be inserted into a LogRecord of the given resource type. + This function should only build a dict of items that are consistent across multiple logging statements + of the same resource type, such as environment variables. 
Th + + Returns: + dict: + A dict representation of labels and the values of those labels + """ + labels = {} + environ_vars = { + _CLOUD_RUN_JOB_RESOURCE_TYPE: { + _CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL: _CLOUD_RUN_EXECUTION_ID, + _CLOUD_RUN_JOBS_TASK_INDEX_LABEL: _CLOUD_RUN_TASK_INDEX, + _CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL: _CLOUD_RUN_TASK_ATTEMPT, + } + } + + if resource_type in environ_vars: + for key, env_var in environ_vars[resource_type].items(): + val = os.environ.get(env_var, "") + if val: + labels[key] = val + + return labels + + +def add_resource_labels(resource: Resource, record: logging.LogRecord): + """Returns additional labels to be appended on to a LogRecord object based on the + local environment. Defaults to an empty dictionary if none apply. This is only to be + used for CloudLoggingHandler, as the structured logging daemon already does this. + + Args: + resource (google.cloud.logging.Resource): Resource based on the environment + record (logging.LogRecord): A LogRecord object representing a log record + Returns: + Dict[str, str]: New labels to append to the labels of the LogRecord + """ + if not resource: + return None + + # Get environmental labels from the resource type + labels = _get_environmental_labels(resource.type) + + # Add labels from log record + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: + labels[_GAE_TRACE_ID_LABEL] = record._trace + + return labels diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 28960ae71..233d9eab3 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -18,30 +18,38 @@ import json import logging -from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport -from google.cloud.logging_v2.handlers._monitored_resources import detect_resource +from typing import Optional, IO, Type + +from google.cloud.logging_v2.handlers.transports import ( + 
BackgroundThreadTransport, + Transport, +) +from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + add_resource_labels, +) from google.cloud.logging_v2.handlers._helpers import get_request_data +from google.cloud.logging_v2.resource import Resource + DEFAULT_LOGGER_NAME = "python" -"""Exclude internal logs from propagating through handlers""" +"""Defaults for filtering out noisy loggers""" EXCLUDED_LOGGER_DEFAULTS = ( + "google.api_core.bidi", + "werkzeug", +) + +"""Exclude internal logs from propagating through handlers""" +_INTERNAL_LOGGERS = ( "google.cloud", "google.auth", "google_auth_httplib2", - "google.api_core.bidi", - "werkzeug", ) """These environments require us to remove extra handlers on setup""" _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") -"""Extra trace label to be added on App Engine environments""" -_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" - -"""Resource name for App Engine environments""" -_GAE_RESOURCE_TYPE = "gae_app" - class CloudLoggingFilter(logging.Filter): """Python standard ``logging`` Filter class to add Cloud Logging @@ -70,7 +78,7 @@ def _infer_source_location(record): ("function", "funcName"), ] output = {} - for (gcp_name, std_lib_name) in name_map: + for gcp_name, std_lib_name in name_map: value = getattr(record, std_lib_name, None) if value is not None: output[gcp_name] = value @@ -148,11 +156,12 @@ def __init__( self, client, *, - name=DEFAULT_LOGGER_NAME, - transport=BackgroundThreadTransport, - resource=None, - labels=None, - stream=None, + name: str = DEFAULT_LOGGER_NAME, + transport: Type[Transport] = BackgroundThreadTransport, + resource: Resource = None, + labels: Optional[dict] = None, + stream: Optional[IO] = None, + **kwargs, ): """ Args: @@ -179,7 +188,10 @@ def __init__( resource = detect_resource(client.project) self.name = name self.client = client + client._handlers.add(self) self.transport = transport(client, name, resource=resource) + 
self._transport_open = True + self._transport_cls = transport self.project_id = client.project self.resource = resource self.labels = labels @@ -201,10 +213,15 @@ def emit(self, record): labels = record._labels message = _format_and_parse_message(record, self) - if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: - # add GAE-specific label - labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} + labels = {**add_resource_labels(resource, record), **(labels or {})} or None + # send off request + if not self._transport_open: + self.transport = self._transport_cls( + self.client, self.name, resource=self.resource + ) + self._transport_open = True + self.transport.send( record, message, @@ -217,6 +234,22 @@ def emit(self, record): source_location=record._source_location, ) + def flush(self): + """Forces the Transport object to submit any pending log records. + + For SyncTransport, this is a no-op. + """ + super(CloudLoggingHandler, self).flush() + if self._transport_open: + self.transport.flush() + + def close(self): + """Closes the log handler and cleans up all Transport objects used.""" + if self._transport_open: + self.transport.close() + self.transport = None + self._transport_open = False + def _format_and_parse_message(record, formatter_handler): """ @@ -288,10 +321,11 @@ def setup_logging( excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler to. This will always include the loggers in the path of the logging client itself. - log_level (Optional[int]): Python logging log level. Defaults to + log_level (Optional[int]): The logging level threshold of the attached logger, + as set by the :meth:`logging.Logger.setLevel` method. Defaults to :const:`logging.INFO`. 
""" - all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) + all_excluded_loggers = set(excluded_loggers + _INTERNAL_LOGGERS) logger = logging.getLogger() # remove built-in handlers on App Engine or Cloud Functions environments diff --git a/google/cloud/logging_v2/handlers/structured_log.py b/google/cloud/logging_v2/handlers/structured_log.py index fac9b26b3..dcba02c9c 100644 --- a/google/cloud/logging_v2/handlers/structured_log.py +++ b/google/cloud/logging_v2/handlers/structured_log.py @@ -63,7 +63,13 @@ class StructuredLogHandler(logging.StreamHandler): """ def __init__( - self, *, labels=None, stream=None, project_id=None, json_encoder_cls=None + self, + *, + labels=None, + stream=None, + project_id=None, + json_encoder_cls=None, + **kwargs ): """ Args: @@ -79,8 +85,18 @@ def __init__( log_filter = CloudLoggingFilter(project=project_id, default_labels=labels) self.addFilter(log_filter) + class _Formatter(logging.Formatter): + """Formatter to format log message without traceback""" + + def format(self, record): + """Ignore exception info to avoid duplicating it + https://github.com/googleapis/python-logging/issues/382 + """ + record.message = record.getMessage() + return self.formatMessage(record) + # make logs appear in GCP structured logging format - self._gcp_formatter = logging.Formatter(GCP_FORMAT) + self._gcp_formatter = _Formatter(GCP_FORMAT) self._json_encoder_cls = json_encoder_cls or json.JSONEncoder @@ -115,11 +131,7 @@ def format(self, record): payload = '"message": {},'.format(encoded_message) record._payload_str = payload or "" - # remove exception info to avoid duplicating it - # https://github.com/googleapis/python-logging/issues/382 - record.exc_info = None - record.exc_text = None - # convert to GCP structred logging format + # convert to GCP structured logging format gcp_payload = self._gcp_formatter.format(record) return gcp_payload diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py 
b/google/cloud/logging_v2/handlers/transports/background_thread.py index f361e043c..021112fdb 100644 --- a/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -38,6 +38,13 @@ _WORKER_TERMINATOR = object() _LOGGER = logging.getLogger(__name__) +_CLOSE_THREAD_SHUTDOWN_ERROR_MSG = ( + "CloudLoggingHandler shutting down, cannot send logs entries to Cloud Logging due to " + "inconsistent threading behavior at shutdown. To avoid this issue, flush the logging handler " + "manually or switch to StructuredLogHandler. You can also close the CloudLoggingHandler manually " + "via handler.close or client.close." +) + def _get_many(queue_, *, max_items=None, max_latency=0): """Get multiple items from a Queue. @@ -140,9 +147,11 @@ def _thread_main(self): else: batch.log(**item) - self._safely_commit_batch(batch) + # We cannot commit logs upstream if the main thread is shutting down + if threading.main_thread().is_alive(): + self._safely_commit_batch(batch) - for _ in items: + for it in items: self._queue.task_done() _LOGGER.debug("Background thread exited gracefully.") @@ -162,7 +171,7 @@ def start(self): ) self._thread.daemon = True self._thread.start() - atexit.register(self._main_thread_terminated) + atexit.register(self._handle_exit) def stop(self, *, grace_period=None): """Signals the background thread to stop. @@ -202,26 +211,26 @@ def stop(self, *, grace_period=None): return success - def _main_thread_terminated(self): - """Callback that attempts to send pending logs before termination.""" + def _close(self, close_msg): + """Callback that attempts to send pending logs before termination if the main thread is alive.""" if not self.is_alive: return if not self._queue.empty(): - print( - "Program shutting down, attempting to send %d queued log " - "entries to Cloud Logging..." 
% (self._queue.qsize(),), - file=sys.stderr, - ) + print(close_msg, file=sys.stderr) - if self.stop(grace_period=self._grace_period): + if threading.main_thread().is_alive() and self.stop( + grace_period=self._grace_period + ): print("Sent all pending logs.", file=sys.stderr) - else: + elif not self._queue.empty(): print( "Failed to send %d pending logs." % (self._queue.qsize(),), file=sys.stderr, ) + self._thread = None + def enqueue(self, record, message, **kwargs): """Queues a log entry to be written by the background thread. @@ -240,7 +249,9 @@ def enqueue(self, record, message, **kwargs): queue_entry = { "message": message, "severity": _helpers._normalize_severity(record.levelno), - "timestamp": datetime.datetime.utcfromtimestamp(record.created), + "timestamp": datetime.datetime.fromtimestamp( + record.created, datetime.timezone.utc + ), } queue_entry.update(kwargs) self._queue.put_nowait(queue_entry) @@ -249,6 +260,26 @@ def flush(self): """Submit any pending log records.""" self._queue.join() + def close(self): + """Signals the worker thread to stop, then closes the transport thread. + + This call will attempt to send pending logs before termination, and + should be followed up by disowning the transport object. + """ + atexit.unregister(self._handle_exit) + self._close( + "Background thread shutting down, attempting to send %d queued log " + "entries to Cloud Logging..." % (self._queue.qsize(),) + ) + + def _handle_exit(self): + """Handle system exit. + + Since we cannot send pending logs during system shutdown due to thread errors, + log an error message to stderr to notify the user. 
+ """ + self._close(_CLOSE_THREAD_SHUTDOWN_ERROR_MSG) + class BackgroundThreadTransport(Transport): """Asynchronous transport that uses a background thread.""" @@ -283,6 +314,7 @@ def __init__( """ self.client = client logger = self.client.logger(name, resource=resource) + self.grace_period = grace_period self.worker = _Worker( logger, grace_period=grace_period, @@ -305,3 +337,7 @@ def send(self, record, message, **kwargs): def flush(self): """Submit any pending log records.""" self.worker.flush() + + def close(self): + """Closes the worker thread.""" + self.worker.close() diff --git a/google/cloud/logging_v2/handlers/transports/base.py b/google/cloud/logging_v2/handlers/transports/base.py index a0c9aafa4..31e8f418a 100644 --- a/google/cloud/logging_v2/handlers/transports/base.py +++ b/google/cloud/logging_v2/handlers/transports/base.py @@ -51,3 +51,11 @@ def flush(self): For blocking/sync transports, this is a no-op. """ + pass + + def close(self): + """Closes the transport and cleans up resources used by it. + + This call should be followed up by disowning the transport. + """ + pass diff --git a/google/cloud/logging_v2/handlers/transports/sync.py b/google/cloud/logging_v2/handlers/transports/sync.py index 6f93b2e57..6bf91f8da 100644 --- a/google/cloud/logging_v2/handlers/transports/sync.py +++ b/google/cloud/logging_v2/handlers/transports/sync.py @@ -14,7 +14,7 @@ """Transport for Python logging handler. -Logs directly to the the Cloud Logging API with a synchronous call. +Logs directly to the Cloud Logging API with a synchronous call. """ from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport @@ -59,3 +59,10 @@ def send(self, record, message, **kwargs): labels=labels, **kwargs, ) + + def close(self): + """Closes the transport and cleans up resources used by it. + + This call is usually followed up by cleaning up the reference to the transport. 
+ """ + self.logger = None diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index 88424b27c..eaa8d2d36 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -29,6 +29,7 @@ from google.api_core.exceptions import InvalidArgument from google.rpc.error_details_pb2 import DebugInfo +import google.cloud.logging_v2 import google.protobuf.message _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -161,6 +162,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): api_repr = entry.to_api_repr() entries = [api_repr] + if google.cloud.logging_v2._instrumentation_emitted is False: entries = _add_instrumentation(entries, **kw) google.cloud.logging_v2._instrumentation_emitted = True @@ -199,18 +201,38 @@ def log_text(self, text, *, client=None, **kw): self._do_log(client, TextEntry, text, **kw) def log_struct(self, info, *, client=None, **kw): - """Log a dictionary message + """Logs a dictionary message. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write + The message must be able to be serializable to a Protobuf Struct. + It must be a dictionary of strings to one of the following: + + - :class:`str` + - :class:`int` + - :class:`float` + - :class:`bool` + - :class:`list[str|float|int|bool|list|dict|None]` + - :class:`dict[str, str|float|int|bool|list|dict|None]` + + For more details on Protobuf structs, see https://protobuf.dev/reference/protobuf/google.protobuf/#value. + If the provided dictionary cannot be serialized into a Protobuf struct, + it will not be logged, and a :class:`ValueError` will be raised. + Args: - info (dict): the log entry information + info (dict[str, str|float|int|bool|list|dict|None]): + the log entry information. client (Optional[~logging_v2.client.Client]): The client to use. If not passed, falls back to the ``client`` stored on the current sink. kw (Optional[dict]): additional keyword arguments for the entry. 
See :class:`~logging_v2.entries.LogEntry`. + + Raises: + ValueError: + if the dictionary message provided cannot be serialized into a Protobuf + struct. """ for field in _STRUCT_EXTRACTABLE_FIELDS: # attempt to copy relevant fields from the payload into the LogEntry body @@ -359,7 +381,7 @@ def __init__(self, logger, client, *, resource=None): Args: logger (logging_v2.logger.Logger): the logger to which entries will be logged. - client (~logging_V2.client.Cilent): + client (~logging_V2.client.Client): The client to use. resource (Optional[~logging_v2.resource.Resource]): Monitored resource of the batch, defaults @@ -404,8 +426,22 @@ def log_text(self, text, **kw): def log_struct(self, info, **kw): """Add a struct entry to be logged during :meth:`commit`. + The message must be able to be serializable to a Protobuf Struct. + It must be a dictionary of strings to one of the following: + + - :class:`str` + - :class:`int` + - :class:`float` + - :class:`bool` + - :class:`list[str|float|int|bool|list|dict|None]` + - :class:`dict[str, str|float|int|bool|list|dict|None]` + + For more details on Protobuf structs, see https://protobuf.dev/reference/protobuf/google.protobuf/#value. + If the provided dictionary cannot be serialized into a Protobuf struct, + it will not be logged, and a :class:`ValueError` will be raised during :meth:`commit`. + Args: - info (dict): The struct entry, + info (dict[str, str|float|int|bool|list|dict|None]): The struct entry, kw (Optional[dict]): Additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. """ @@ -450,6 +486,10 @@ def commit(self, *, client=None, partial_success=True): Whether a batch's valid entries should be written even if some other entry failed due to a permanent error such as INVALID_ARGUMENT or PERMISSION_DENIED. + + Raises: + ValueError: + if one of the messages in the batch cannot be successfully parsed. 
""" if client is None: client = self.client diff --git a/google/cloud/logging_v2/services/__init__.py b/google/cloud/logging_v2/services/__init__.py index e8e1c3845..cbf94b283 100644 --- a/google/cloud/logging_v2/services/__init__.py +++ b/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/__init__.py b/google/cloud/logging_v2/services/config_service_v2/__init__.py index 6eb3681ce..187d00d52 100644 --- a/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 7549eea48..ad681a9c7 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict -import functools import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -33,38 +34,57 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf + try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class ConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" _client: ConfigServiceV2Client + # Copy defaults from the 
synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ConfigServiceV2Client._DEFAULT_UNIVERSE cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) parse_cmek_settings_path = staticmethod( ConfigServiceV2Client.parse_cmek_settings_path ) + link_path = staticmethod(ConfigServiceV2Client.link_path) + parse_link_path = staticmethod(ConfigServiceV2Client.parse_link_path) log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) @@ -180,19 +200,40 @@ def transport(self) -> ConfigServiceV2Transport: """ return self._client.transport - get_transport_class = functools.partial( - type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client) - ) + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = ConfigServiceV2Client.get_transport_class def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the config service v2 client. + """Instantiates the config service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -200,26 +241,43 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. 
@@ -231,6 +289,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + }, + ) + async def list_buckets( self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, @@ -238,7 +318,7 @@ async def list_buckets( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. @@ -290,31 +370,40 @@ async def sample_list_buckets(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.ListBucketsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -323,11 +412,9 @@ async def sample_list_buckets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_buckets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_buckets + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -335,6 +422,9 @@ async def sample_list_buckets(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -349,6 +439,8 @@ async def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -361,7 +453,7 @@ async def get_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -394,11 +486,13 @@ async def sample_get_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): The request object. The parameters to ``GetBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -407,22 +501,228 @@ async def sample_get_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_bucket + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket_async + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, ) + # Done; return the response. + return response + + async def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket_async + ] + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -431,6 +731,14 @@ async def sample_get_bucket(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + # Done; return the response. return response @@ -440,7 +748,7 @@ async def create_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -476,11 +784,13 @@ async def sample_create_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogBucket: @@ -489,15 +799,16 @@ async def sample_create_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -505,6 +816,9 @@ async def sample_create_bucket(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -522,14 +836,9 @@ async def update_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: - r"""Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. 
@@ -566,11 +875,13 @@ async def sample_update_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -579,15 +890,16 @@ async def sample_update_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -595,6 +907,9 @@ async def sample_update_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -612,7 +927,7 @@ async def delete_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -647,22 +962,25 @@ async def sample_delete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): The request object. The parameters to ``DeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -670,6 +988,9 @@ async def sample_delete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -684,7 +1005,7 @@ async def undelete_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 @@ -716,22 +1037,25 @@ async def sample_undelete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): The request object. The parameters to ``UndeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - request = logging_config.UndeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.undelete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -739,6 +1063,9 @@ async def sample_undelete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -754,7 +1081,7 @@ async def list_views( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. @@ -798,31 +1125,40 @@ async def sample_list_views(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.ListViewsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -831,11 +1167,9 @@ async def sample_list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_views, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_views + ] # Certain fields should be provided within the metadata header; # add these here. @@ -843,6 +1177,9 @@ async def sample_list_views(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -857,6 +1194,8 @@ async def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -869,7 +1208,7 @@ async def get_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. @@ -902,11 +1241,13 @@ async def sample_get_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): The request object. The parameters to ``GetView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -915,15 +1256,14 @@ async def sample_get_view(): """ # Create or coerce a protobuf request object. - request = logging_config.GetViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_view] # Certain fields should be provided within the metadata header; # add these here. @@ -931,6 +1271,9 @@ async def sample_get_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -948,7 +1291,7 @@ async def create_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -983,11 +1326,13 @@ async def sample_create_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): The request object. The parameters to ``CreateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -996,15 +1341,16 @@ async def sample_create_view(): """ # Create or coerce a protobuf request object. 
- request = logging_config.CreateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1012,6 +1358,9 @@ async def sample_create_view(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1029,7 +1378,7 @@ async def update_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new @@ -1066,11 +1415,13 @@ async def sample_update_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): The request object. The parameters to ``UpdateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1079,15 +1430,16 @@ async def sample_update_view(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1095,6 +1447,9 @@ async def sample_update_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1112,7 +1467,7 @@ async def delete_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it @@ -1145,22 +1500,25 @@ async def sample_delete_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): The request object. 
The parameters to ``DeleteView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1168,6 +1526,9 @@ async def sample_delete_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1183,7 +1544,7 @@ async def list_sinks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. 
@@ -1231,11 +1592,13 @@ async def sample_list_sinks(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: @@ -1246,16 +1609,22 @@ async def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.ListSinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -1264,22 +1633,9 @@ async def sample_list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_sinks, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sinks + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1287,6 +1643,9 @@ async def sample_list_sinks(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1301,6 +1660,8 @@ async def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1314,7 +1675,7 @@ async def get_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1364,11 +1725,13 @@ async def sample_get_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1384,16 +1747,22 @@ async def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.GetSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1402,22 +1771,7 @@ async def sample_get_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_sink, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -1427,6 +1781,9 @@ async def sample_get_sink(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1446,7 +1803,7 @@ async def create_sink( sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -1512,11 +1869,13 @@ async def sample_create_sink(): This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogSink: @@ -1532,16 +1891,22 @@ async def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, sink] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.CreateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1552,11 +1917,9 @@ async def sample_create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_sink, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1564,6 +1927,9 @@ async def sample_create_sink(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -1584,7 +1950,7 @@ async def update_sink( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -1674,11 +2040,13 @@ async def sample_update_sink(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1694,16 +2062,22 @@ async def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [sink_name, sink, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.UpdateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1716,22 +2090,9 @@ async def sample_update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_sink, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1741,6 +2102,9 @@ async def sample_update_sink(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1759,7 +2123,7 @@ async def delete_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. 
If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -1808,23 +2172,31 @@ async def sample_delete_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.DeleteSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -1833,22 +2205,9 @@ async def sample_delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_sink, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1858,6 +2217,9 @@ async def sample_delete_sink(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1866,17 +2228,21 @@ async def sample_delete_sink(): metadata=metadata, ) - async def list_exclusions( + async def create_link( self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsAsyncPager: - r"""Lists all the exclusions on the \_Default sink in a parent - resource. + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. 
code-block:: python @@ -1889,44 +2255,576 @@ async def list_exclusions( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_list_exclusions(): + async def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): - The request object. The parameters to ``ListExclusions``. + request (Optional[Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]]): + The request object. The parameters to CreateLink. parent (:class:`str`): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link (:class:`google.cloud.logging_v2.types.Link`): + Required. The new link. 
+ This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (:class:`str`): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, link, link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + async def delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]]): + The request object. The parameters to DeleteLink. + name (:class:`str`): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + async def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksAsyncPager: + r"""Lists links. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLinksRequest, dict]]): + The request object. The parameters to ListLinks. + parent (:class:`str`): + Required. 
The parent resource whose links are to be + listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: + r"""Gets a link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetLinkRequest, dict]]): + The request object. The parameters to GetLink. + name (:class:`str`): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsAsyncPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): + The request object. The parameters to ``ListExclusions``. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: @@ -1937,16 +2835,22 @@ async def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.ListExclusionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1955,22 +2859,9 @@ async def sample_list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_exclusions + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -1978,6 +2869,9 @@ async def sample_list_exclusions(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1992,6 +2886,8 @@ async def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2005,7 +2901,7 @@ async def get_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -2055,11 +2951,13 @@ async def sample_get_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2073,16 +2971,22 @@ async def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.GetExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2091,22 +2995,9 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_exclusion, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2114,6 +3005,9 @@ async def sample_get_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -2133,7 +3027,7 @@ async def create_exclusion( exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -2200,11 +3094,13 @@ async def sample_create_exclusion(): This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2218,16 +3114,22 @@ async def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, exclusion] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.CreateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2238,11 +3140,9 @@ async def sample_create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2250,6 +3150,9 @@ async def sample_create_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2270,7 +3173,7 @@ async def update_exclusion( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. 
@@ -2348,11 +3251,13 @@ async def sample_update_exclusion(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2366,16 +3271,22 @@ async def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, exclusion, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.UpdateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2388,11 +3299,9 @@ async def sample_update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2400,6 +3309,9 @@ async def sample_update_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2418,7 +3330,7 @@ async def delete_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -2466,23 +3378,31 @@ async def sample_delete_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.DeleteExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2491,22 +3411,9 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -2514,6 +3421,9 @@ async def sample_delete_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -2528,7 +3438,7 @@ async def get_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -2575,11 +3485,13 @@ async def sample_get_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -2598,15 +3510,16 @@ async def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2614,6 +3527,9 @@ async def sample_get_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2631,7 +3547,7 @@ async def update_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -2683,11 +3599,13 @@ async def sample_update_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -2706,15 +3624,16 @@ async def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. 
- request = logging_config.UpdateCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2722,6 +3641,9 @@ async def sample_update_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2740,7 +3662,7 @@ async def get_settings( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -2812,11 +3734,13 @@ async def sample_get_settings(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -2826,16 +3750,22 @@ async def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.GetSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2844,11 +3774,9 @@ async def sample_get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_settings + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -2856,6 +3784,9 @@ async def sample_get_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2875,7 +3806,7 @@ async def update_settings( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -2954,11 +3885,13 @@ async def sample_update_settings(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -2968,16 +3901,22 @@ async def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([settings, update_mask]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [settings, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_config.UpdateSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2988,11 +3927,9 @@ async def sample_update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3000,6 +3937,9 @@ async def sample_update_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3017,7 +3957,7 @@ async def copy_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. 
@@ -3056,11 +3996,13 @@ async def sample_copy_log_entries(): Args: request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): The request object. The parameters to CopyLogEntries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -3073,15 +4015,19 @@ async def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - request = logging_config.CopyLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.copy_log_entries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.copy_log_entries + ] + + # Validate the universe domain. + self._client._validate_universe_domain() # Send the request. response = await rpc( @@ -3102,7 +4048,172 @@ async def sample_copy_log_entries(): # Done; return the response. 
return response - async def __aenter__(self): + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "ConfigServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): @@ -3113,5 +4224,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("ConfigServiceV2AsyncClient",) diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index c76b46fa9..6c97c6556 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,10 +14,14 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -28,6 +32,7 @@ Union, cast, ) +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -40,16 +45,28 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -126,11 +143,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -193,6 +214,30 @@ def parse_cmek_settings_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} + @staticmethod + def link_path( + project: str, + location: str, + bucket: str, + link: str, + ) -> str: + """Returns a fully-qualified link string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) + + @staticmethod + def parse_link_path(path: str) -> Dict[str, str]: + """Parses a link path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def log_bucket_path( project: str, @@ -369,7 +414,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -399,6 +444,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -432,11 +482,180 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ConfigServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -448,25 +667,37 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -477,17 +708,38 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ConfigServiceV2Client._read_environment_variables() + self._client_cert_source = ConfigServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert ) + self._universe_domain = ConfigServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -496,20 +748,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
- if isinstance(transport, ConfigServiceV2Transport): + transport_provided = isinstance(transport, ConfigServiceV2Transport) + if transport_provided: # transport is a ConfigServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(ConfigServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ConfigServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -519,19 +784,49 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) - self._transport = Transport( + transport_init: Union[ + Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport] + ] = ( + ConfigServiceV2Client.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ConfigServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + 
client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2Client`.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + }, + ) + def list_buckets( self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, @@ -539,7 +834,7 @@ def list_buckets( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsPager: r"""Lists log buckets. @@ -594,31 +889,35 @@ def sample_list_buckets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListBucketsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListBucketsRequest): request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -636,6 +935,9 @@ def sample_list_buckets(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -650,6 +952,8 @@ def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -662,7 +966,7 @@ def get_bucket( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -698,8 +1002,10 @@ def sample_get_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -708,10 +1014,8 @@ def sample_get_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetBucketRequest): request = logging_config.GetBucketRequest(request) @@ -725,6 +1029,9 @@ def sample_get_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -736,17 +1043,18 @@ def sample_get_bucket(): # Done; return the response. 
return response - def create_bucket( + def create_bucket_async( self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogBucket: - r"""Creates a log bucket that can be used to store log - entries. After a bucket has been created, the bucket's - location cannot be changed. + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. .. code-block:: python @@ -759,7 +1067,7 @@ def create_bucket( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_create_bucket(): + def sample_create_bucket_async(): # Create a client client = logging_v2.ConfigServiceV2Client() @@ -770,7 +1078,11 @@ def sample_create_bucket(): ) # Make the request - response = client.create_bucket(request=request) + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) @@ -781,26 +1093,29 @@ def sample_create_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: - google.cloud.logging_v2.types.LogBucket: - Describes a repository in which log - entries are stored. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_bucket] + rpc = self._transport._wrapped_methods[self._transport.create_bucket_async] # Certain fields should be provided within the metadata header; # add these here. @@ -808,6 +1123,9 @@ def sample_create_bucket(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -816,23 +1134,26 @@ def sample_create_bucket(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + # Done; return the response. 
return response - def update_bucket( + def update_bucket_async( self, request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogBucket: - r"""Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. @@ -851,7 +1172,7 @@ def update_bucket( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_update_bucket(): + def sample_update_bucket_async(): # Create a client client = logging_v2.ConfigServiceV2Client() @@ -861,7 +1182,11 @@ def sample_update_bucket(): ) # Make the request - response = client.update_bucket(request=request) + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) @@ -872,26 +1197,29 @@ def sample_update_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: - google.cloud.logging_v2.types.LogBucket: - Describes a repository in which log - entries are stored. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_bucket] + rpc = self._transport._wrapped_methods[self._transport.update_bucket_async] # Certain fields should be provided within the metadata header; # add these here. @@ -899,6 +1227,9 @@ def sample_update_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -907,23 +1238,28 @@ def sample_update_bucket(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + # Done; return the response. 
return response - def delete_bucket( + def create_bucket( self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a log bucket. - - Changes the bucket's ``lifecycle_state`` to the - ``DELETE_REQUESTED`` state. After 7 days, the bucket will be - purged and all log entries in the bucket will be permanently - deleted. + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. .. code-block:: python @@ -936,64 +1272,84 @@ def delete_bucket( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_delete_bucket(): + def sample_create_bucket(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( - name="name_value", + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", ) # Make the request - client.delete_bucket(request=request) + response = client.create_bucket(request=request) + + # Handle the response + print(response) Args: - request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): - The request object. The parameters to ``DeleteBucket``. + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): + The request object. The parameters to ``CreateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.DeleteBucketRequest): - request = logging_config.DeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + rpc = self._transport._wrapped_methods[self._transport.create_bucket] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def undelete_bucket( + # Done; return the response. 
+ return response + + def update_bucket( self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Undeletes a log bucket. A bucket that has been - deleted can be undeleted within the grace period of 7 - days. + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: + r"""Updates a log bucket. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. .. code-block:: python @@ -1006,36 +1362,197 @@ def undelete_bucket( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_undelete_bucket(): + def sample_update_bucket(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.UpdateBucketRequest( name="name_value", ) # Make the request - client.undelete_bucket(request=request) + response = client.update_bucket(request=request) + + # Handle the response + print(response) Args: - request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): - The request object. The parameters to ``UndeleteBucket``. + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + The request object. The parameters to ``UpdateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UndeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UndeleteBucketRequest): - request = logging_config.UndeleteBucketRequest(request) + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - # Wrap the RPC method; this adds retry and timeout information, + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository in which log + entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): + The request object. The parameters to ``DeleteBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): + The request object. The parameters to ``UndeleteBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] @@ -1045,6 +1562,9 @@ def sample_undelete_bucket(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
rpc( request, @@ -1060,7 +1580,7 @@ def list_views( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. @@ -1107,31 +1627,35 @@ def sample_list_views(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListViewsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListViewsRequest): request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1149,6 +1673,9 @@ def sample_list_views(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1163,6 +1690,8 @@ def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1175,7 +1704,7 @@ def get_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. @@ -1211,8 +1740,10 @@ def sample_get_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1221,10 +1752,8 @@ def sample_get_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetViewRequest): request = logging_config.GetViewRequest(request) @@ -1238,6 +1767,9 @@ def sample_get_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1255,7 +1787,7 @@ def create_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1293,8 +1825,10 @@ def sample_create_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1303,10 +1837,8 @@ def sample_create_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.CreateViewRequest): request = logging_config.CreateViewRequest(request) @@ -1320,6 +1852,9 @@ def sample_create_view(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1337,7 +1872,7 @@ def update_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new @@ -1377,8 +1912,10 @@ def sample_update_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1387,10 +1924,8 @@ def sample_update_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.UpdateViewRequest): request = logging_config.UpdateViewRequest(request) @@ -1404,6 +1939,9 @@ def sample_update_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1421,7 +1959,7 @@ def delete_view( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it @@ -1457,14 +1995,14 @@ def sample_delete_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteViewRequest): request = logging_config.DeleteViewRequest(request) @@ -1478,6 +2016,9 @@ def sample_delete_view(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. rpc( request, @@ -1493,7 +2034,7 @@ def list_sinks( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSinksPager: r"""Lists sinks. @@ -1544,8 +2085,10 @@ def sample_list_sinks(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: @@ -1556,19 +2099,20 @@ def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListSinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListSinksRequest): request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1586,6 +2130,9 @@ def sample_list_sinks(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1600,6 +2147,8 @@ def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1613,7 +2162,7 @@ def get_sink( sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1666,8 +2215,10 @@ def sample_get_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1683,19 +2234,20 @@ def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSinkRequest): request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1715,6 +2267,9 @@ def sample_get_sink(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1734,7 +2289,7 @@ def create_sink( sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -1803,8 +2358,10 @@ def sample_create_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1820,19 +2377,20 @@ def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, sink] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateSinkRequest): request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1852,6 +2410,9 @@ def sample_create_sink(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1872,7 +2433,7 @@ def update_sink( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -1892,132 +2453,649 @@ def update_sink( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_update_sink(): + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): + The request object. The parameters to ``UpdateSink``. + sink_name (str): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (google.cloud.logging_v2.types.LogSink): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. 
+ + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. + + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + For example: ``updateMask=filter`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a sink. 
If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): + The request object. The parameters to ``DeleteSink``. + sink_name (str): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [sink_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. 
A log bucket may currently + only contain one link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]): + The request object. The parameters to CreateLink. + parent (str): + Required. The full resource name of the bucket to create + a link for. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link (google.cloud.logging_v2.types.Link): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (str): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. 
+ + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, link, link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + def delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]): + The request object. The parameters to DeleteLink. + name (str): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksPager: + r"""Lists links. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_list_links(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() - sink.name = "name_value" - sink.destination = "destination_value" - - request = logging_v2.UpdateSinkRequest( - sink_name="sink_name_value", - sink=sink, + request = logging_v2.ListLinksRequest( + parent="parent_value", ) # Make the request - response = client.update_sink(request=request) + page_result = client.list_links(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): - The request object. The parameters to ``UpdateSink``. - sink_name (str): - Required. 
The full resource name of the sink to update, - including the parent resource and the sink identifier: - - :: - - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" - - For example: - - ``"projects/my-project/sinks/my-sink"`` - - This corresponds to the ``sink_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - sink (google.cloud.logging_v2.types.LogSink): - Required. The updated sink, whose name is the same - identifier that appears as part of ``sink_name``. - - This corresponds to the ``sink`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Field mask that specifies the fields in - ``sink`` that need an update. A sink field will be - overwritten if, and only if, it is in the update mask. - ``name`` and output only fields cannot be updated. - - An empty ``updateMask`` is temporarily treated as using - the following mask for backwards compatibility purposes: - - ``destination,filter,includeChildren`` - - At some point in the future, behavior will be removed - and specifying an empty ``updateMask`` will be an error. - - For a detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + request (Union[google.cloud.logging_v2.types.ListLinksRequest, dict]): + The request object. The parameters to ListLinks. + parent (str): + Required. 
The parent resource whose links are to be + listed: - For example: ``updateMask=filter`` + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: - google.cloud.logging_v2.types.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, a - Pub/Sub topic or a Cloud Logging log - bucket. A logs filter controls which log - entries are exported. The sink must be - created within a project, organization, - billing account, or folder. + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name, sink, update_mask]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.UpdateSinkRequest): - request = logging_config.UpdateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: - request.sink_name = sink_name - if sink is not None: - request.sink = sink - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_sink] + rpc = self._transport._wrapped_methods[self._transport.list_links] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("sink_name", request.sink_name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -2026,20 +3104,30 @@ def sample_update_sink(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLinksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + # Done; return the response. return response - def delete_sink( + def get_link( self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, *, - sink_name: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a sink. If the sink has a unique ``writer_identity``, - then that service account is also deleted. + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: + r"""Gets a link. .. code-block:: python @@ -2052,86 +3140,95 @@ def delete_sink( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_delete_sink(): + def sample_get_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( - sink_name="sink_name_value", + request = logging_v2.GetLinkRequest( + name="name_value", ) # Make the request - client.delete_sink(request=request) - - Args: - request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): - The request object. The parameters to ``DeleteSink``. - sink_name (str): - Required. 
The full resource name of the sink to delete, - including the parent resource and the sink identifier: - - :: + response = client.get_link(request=request) - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" + # Handle the response + print(response) - For example: + Args: + request (Union[google.cloud.logging_v2.types.GetLinkRequest, dict]): + The request object. The parameters to GetLink. + name (str): + Required. The resource name of the link: - ``"projects/my-project/sinks/my-sink"`` + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] - This corresponds to the ``sink_name`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. + """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, logging_config.DeleteSinkRequest): - request = logging_config.DeleteSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: - request.sink_name = sink_name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_sink] + rpc = self._transport._wrapped_methods[self._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("sink_name", request.sink_name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) + # Done; return the response. 
+ return response + def list_exclusions( self, request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, @@ -2139,7 +3236,7 @@ def list_exclusions( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2191,8 +3288,10 @@ def sample_list_exclusions(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: @@ -2203,19 +3302,20 @@ def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListExclusionsRequest): request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2233,6 +3333,9 @@ def sample_list_exclusions(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2247,6 +3350,8 @@ def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2260,7 +3365,7 @@ def get_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -2313,8 +3418,10 @@ def sample_get_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2328,19 +3435,20 @@ def sample_get_exclusion(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetExclusionRequest): request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2358,6 +3466,9 @@ def sample_get_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2377,7 +3488,7 @@ def create_exclusion( exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -2447,8 +3558,10 @@ def sample_create_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2462,19 +3575,20 @@ def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, exclusion] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateExclusionRequest): request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2494,6 +3608,9 @@ def sample_create_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -2514,7 +3631,7 @@ def update_exclusion( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -2595,8 +3712,10 @@ def sample_update_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2610,19 +3729,20 @@ def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, exclusion, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateExclusionRequest): request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2644,6 +3764,9 @@ def sample_update_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2662,7 +3785,7 @@ def delete_exclusion( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -2713,23 +3836,26 @@ def sample_delete_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteExclusionRequest): request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2747,6 +3873,9 @@ def sample_delete_exclusion(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -2761,7 +3890,7 @@ def get_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -2811,8 +3940,10 @@ def sample_get_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.CmekSettings: @@ -2831,10 +3962,8 @@ def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetCmekSettingsRequest): request = logging_config.GetCmekSettingsRequest(request) @@ -2848,6 +3977,9 @@ def sample_get_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2865,7 +3997,7 @@ def update_cmek_settings( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -2920,8 +4052,10 @@ def sample_update_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -2940,10 +4074,8 @@ def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateCmekSettingsRequest): request = logging_config.UpdateCmekSettingsRequest(request) @@ -2957,6 +4089,9 @@ def sample_update_cmek_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2975,7 +4110,7 @@ def get_settings( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -3050,8 +4185,10 @@ def sample_get_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3061,19 +4198,20 @@ def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSettingsRequest): request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3091,6 +4229,9 @@ def sample_get_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3110,7 +4251,7 @@ def update_settings( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -3192,8 +4333,10 @@ def sample_update_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3203,19 +4346,20 @@ def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([settings, update_mask]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [settings, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateSettingsRequest): request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3235,6 +4379,9 @@ def sample_update_settings(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -3252,7 +4399,7 @@ def copy_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3294,8 +4441,10 @@ def sample_copy_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -3308,10 +4457,8 @@ def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CopyLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CopyLogEntriesRequest): request = logging_config.CopyLogEntriesRequest(request) @@ -3319,6 +4466,9 @@ def sample_copy_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -3351,10 +4501,185 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("ConfigServiceV2Client",) diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py index 3c5ce7754..62906815e 100644 --- a/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.logging_v2.types import logging_config @@ -51,7 +64,9 @@ def __init__( request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -62,12 +77,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +100,12 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogBucket]: @@ -113,7 +140,9 @@ def __init__( request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -124,12 +153,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +176,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: @@ -179,7 +220,9 @@ def __init__( request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -190,12 +233,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -206,7 +256,12 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogView]: @@ -241,7 +296,9 @@ def __init__( request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -252,12 +309,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -268,7 +332,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogView]: @@ -307,7 +376,9 @@ def __init__( request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -318,12 +389,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -334,7 +412,12 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogSink]: @@ -369,7 +452,9 @@ def __init__( request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -380,12 +465,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -396,7 +488,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: @@ -411,6 +508,162 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListLinksPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[logging_config.Link]: + for page in self.pages: + yield from page.links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLinksAsyncPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``links`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[logging_config.Link]: + async def async_generator(): + async for page in self.pages: + for response in page.links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListExclusionsPager: """A pager for iterating through ``list_exclusions`` requests. @@ -435,7 +688,9 @@ def __init__( request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -446,12 +701,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -462,7 +724,12 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogExclusion]: @@ -497,7 +764,9 @@ def __init__( request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -508,12 +777,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -524,7 +800,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/README.rst b/google/cloud/logging_v2/services/config_service_v2/transports/README.rst new file mode 100644 index 000000000..4ea848796 --- /dev/null +++ b/google/cloud/logging_v2/services/config_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ConfigServiceV2Transport` is the ABC for all transports. +- public child `ConfigServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ConfigServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseConfigServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ConfigServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 93a29df09..6f8979ef8 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 53046583b..db7b93b85 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore @@ -35,6 +36,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -65,7 +69,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -90,6 +94,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -102,7 +108,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) @@ -128,6 +134,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -141,6 +151,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_bucket_async: gapic_v1.method.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), self.create_bucket: gapic_v1.method.wrap_method( self.create_bucket, default_timeout=None, @@ -255,6 +275,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_link: gapic_v1.method.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: 
gapic_v1.method.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), self.list_exclusions: gapic_v1.method.wrap_method( self.list_exclusions, default_retry=retries.Retry( @@ -338,6 +378,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -375,6 +430,24 @@ def get_bucket( ]: raise NotImplementedError() + @property + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def create_bucket( self, @@ -507,6 +580,45 @@ def delete_sink( ]: raise NotImplementedError() + @property + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_link( + 
self, + ) -> Callable[ + [logging_config.GetLinkRequest], + Union[logging_config.Link, Awaitable[logging_config.Link]], + ]: + raise NotImplementedError() + @property def list_exclusions( self, @@ -600,6 +712,33 @@ def copy_log_entries( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 97c220686..4dee4e647 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +25,91 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": 
str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): """gRPC backend transport for ConfigServiceV2. @@ -53,7 +133,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -67,20 +147,23 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -90,11 +173,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -121,9 +204,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -162,7 +246,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -178,7 +264,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -242,7 +333,9 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) # Return the client from cache. return self._operations_client @@ -268,7 +361,7 @@ def list_buckets( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_buckets" not in self._stubs: - self._stubs["list_buckets"] = self.grpc_channel.unary_unary( + self._stubs["list_buckets"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -294,13 +387,74 @@ def get_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_bucket" not in self._stubs: - self._stubs["get_bucket"] = self.grpc_channel.unary_unary( + self._stubs["get_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) return self._stubs["get_bucket"] + @property + def create_bucket_async( + self, + ) -> Callable[[logging_config.CreateBucketRequest], operations_pb2.Operation]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_bucket_async"] + + @property + def update_bucket_async( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], operations_pb2.Operation]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_bucket_async"] + @property def create_bucket( self, @@ -322,7 +476,7 @@ def create_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_bucket" not in self._stubs: - self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + self._stubs["create_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -335,12 +489,7 @@ def update_bucket( ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. - Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. @@ -359,7 +508,7 @@ def update_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_bucket" not in self._stubs: - self._stubs["update_bucket"] = self.grpc_channel.unary_unary( + self._stubs["update_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -390,7 +539,7 @@ def delete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_bucket" not in self._stubs: - self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -418,7 +567,7 @@ def undelete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "undelete_bucket" not in self._stubs: - self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -444,7 +593,7 @@ def list_views( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_views" not in self._stubs: - self._stubs["list_views"] = self.grpc_channel.unary_unary( + self._stubs["list_views"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -470,7 +619,7 @@ def get_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_view" not in self._stubs: - self._stubs["get_view"] = self.grpc_channel.unary_unary( + self._stubs["get_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -497,7 +646,7 @@ def create_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_view" not in self._stubs: - self._stubs["create_view"] = self.grpc_channel.unary_unary( + self._stubs["create_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -527,7 +676,7 @@ def update_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_view" not in self._stubs: - self._stubs["update_view"] = self.grpc_channel.unary_unary( + self._stubs["update_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -556,7 +705,7 @@ def delete_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_view" not in self._stubs: - self._stubs["delete_view"] = self.grpc_channel.unary_unary( + self._stubs["delete_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -582,7 +731,7 @@ def list_sinks( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_sinks" not in self._stubs: - self._stubs["list_sinks"] = self.grpc_channel.unary_unary( + self._stubs["list_sinks"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -608,7 +757,7 @@ def get_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_sink" not in self._stubs: - self._stubs["get_sink"] = self.grpc_channel.unary_unary( + self._stubs["get_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -638,7 +787,7 @@ def create_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_sink" not in self._stubs: - self._stubs["create_sink"] = self.grpc_channel.unary_unary( + self._stubs["create_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -669,7 +818,7 @@ def update_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_sink" not in self._stubs: - self._stubs["update_sink"] = self.grpc_channel.unary_unary( + self._stubs["update_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -696,13 +845,121 @@ def delete_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_sink" not in self._stubs: - self._stubs["delete_sink"] = self.grpc_channel.unary_unary( + self._stubs["delete_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] + @property + def create_link( + self, + ) -> Callable[[logging_config.CreateLinkRequest], operations_pb2.Operation]: + r"""Return a callable for the create link method over gRPC. 
+ + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_link"] + + @property + def delete_link( + self, + ) -> Callable[[logging_config.DeleteLinkRequest], operations_pb2.Operation]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_link"] + + @property + def list_links( + self, + ) -> Callable[[logging_config.ListLinksRequest], logging_config.ListLinksResponse]: + r"""Return a callable for the list links method over gRPC. + + Lists links. 
+ + Returns: + Callable[[~.ListLinksRequest], + ~.ListLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs["list_links"] + + @property + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], logging_config.Link]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + ~.Link]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs["get_link"] + @property def list_exclusions( self, @@ -725,7 +982,7 @@ def list_exclusions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_exclusions" not in self._stubs: - self._stubs["list_exclusions"] = self.grpc_channel.unary_unary( + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -751,7 +1008,7 @@ def get_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_exclusion" not in self._stubs: - self._stubs["get_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -779,7 +1036,7 @@ def create_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_exclusion" not in self._stubs: - self._stubs["create_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -806,7 +1063,7 @@ def update_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_exclusion" not in self._stubs: - self._stubs["update_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -832,7 +1089,7 @@ def delete_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_exclusion" not in self._stubs: - self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -867,7 +1124,7 @@ def get_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_cmek_settings" not in self._stubs: - self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -909,7 +1166,7 @@ def update_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_cmek_settings" not in self._stubs: - self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -945,7 +1202,7 @@ def get_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_settings" not in self._stubs: - self._stubs["get_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -988,7 +1245,7 @@ def update_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_settings" not in self._stubs: - self._stubs["update_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1015,7 +1272,7 @@ def copy_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "copy_log_entries" not in self._stubs: - self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1023,7 +1280,60 @@ def copy_log_entries( return self._stubs["copy_log_entries"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] @property def kind(self) -> str: diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 0d0737576..2686f80e3 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,16 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config @@ -31,6 +40,82 @@ from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": 
request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): """gRPC AsyncIO backend transport for ConfigServiceV2. @@ -68,7 +153,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -98,7 +182,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -112,21 +196,24 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -136,11 +223,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -167,9 +254,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -207,7 +295,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -223,7 +313,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -246,7 +342,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -274,7 +370,7 @@ def list_buckets( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_buckets" not in self._stubs: - self._stubs["list_buckets"] = self.grpc_channel.unary_unary( + self._stubs["list_buckets"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -302,13 +398,78 @@ def get_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_bucket" not in self._stubs: - self._stubs["get_bucket"] = self.grpc_channel.unary_unary( + self._stubs["get_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) return self._stubs["get_bucket"] + @property + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create bucket async method over gRPC. 
+ + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_bucket_async"] + + @property + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_bucket_async"] + @property def create_bucket( self, @@ -332,7 +493,7 @@ def create_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_bucket" not in self._stubs: - self._stubs["create_bucket"] = self.grpc_channel.unary_unary( + self._stubs["create_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -347,12 +508,7 @@ def update_bucket( ]: r"""Return a callable for the update bucket method over gRPC. - Updates a log bucket. This method replaces the following fields - in the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - ``FAILED_PRECONDITION`` will be returned. + Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, then ``FAILED_PRECONDITION`` will be returned. @@ -371,7 +527,7 @@ def update_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_bucket" not in self._stubs: - self._stubs["update_bucket"] = self.grpc_channel.unary_unary( + self._stubs["update_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -402,7 +558,7 @@ def delete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_bucket" not in self._stubs: - self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -430,7 +586,7 @@ def undelete_bucket( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "undelete_bucket" not in self._stubs: - self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -458,7 +614,7 @@ def list_views( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_views" not in self._stubs: - self._stubs["list_views"] = self.grpc_channel.unary_unary( + self._stubs["list_views"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -484,7 +640,7 @@ def get_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_view" not in self._stubs: - self._stubs["get_view"] = self.grpc_channel.unary_unary( + self._stubs["get_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -513,7 +669,7 @@ def create_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_view" not in self._stubs: - self._stubs["create_view"] = self.grpc_channel.unary_unary( + self._stubs["create_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -545,7 +701,7 @@ def update_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_view" not in self._stubs: - self._stubs["update_view"] = self.grpc_channel.unary_unary( + self._stubs["update_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -574,7 +730,7 @@ def delete_view( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_view" not in self._stubs: - self._stubs["delete_view"] = self.grpc_channel.unary_unary( + self._stubs["delete_view"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -602,7 +758,7 @@ def list_sinks( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_sinks" not in self._stubs: - self._stubs["list_sinks"] = self.grpc_channel.unary_unary( + self._stubs["list_sinks"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -628,7 +784,7 @@ def get_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_sink" not in self._stubs: - self._stubs["get_sink"] = self.grpc_channel.unary_unary( + self._stubs["get_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -660,7 +816,7 @@ def create_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_sink" not in self._stubs: - self._stubs["create_sink"] = self.grpc_channel.unary_unary( + self._stubs["create_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -693,7 +849,7 @@ def update_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_sink" not in self._stubs: - self._stubs["update_sink"] = self.grpc_channel.unary_unary( + self._stubs["update_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -720,13 +876,127 @@ def delete_sink( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_sink" not in self._stubs: - self._stubs["delete_sink"] = self.grpc_channel.unary_unary( + self._stubs["delete_sink"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] + @property + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_link"] + + @property + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_link"] + + @property + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse] + ]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + Awaitable[~.ListLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs["list_links"] + + @property + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], Awaitable[logging_config.Link]]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + Awaitable[~.Link]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs["get_link"] + @property def list_exclusions( self, @@ -750,7 +1020,7 @@ def list_exclusions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_exclusions" not in self._stubs: - self._stubs["list_exclusions"] = self.grpc_channel.unary_unary( + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -778,7 +1048,7 @@ def get_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_exclusion" not in self._stubs: - self._stubs["get_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -808,7 +1078,7 @@ def create_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_exclusion" not in self._stubs: - self._stubs["create_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -837,7 +1107,7 @@ def update_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_exclusion" not in self._stubs: - self._stubs["update_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -863,7 +1133,7 @@ def delete_exclusion( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_exclusion" not in self._stubs: - self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -900,7 +1170,7 @@ def get_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_cmek_settings" not in self._stubs: - self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -943,7 +1213,7 @@ def update_cmek_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_cmek_settings" not in self._stubs: - self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -981,7 +1251,7 @@ def get_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_settings" not in self._stubs: - self._stubs["get_settings"] = self.grpc_channel.unary_unary( + self._stubs["get_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1026,7 +1296,7 @@ def update_settings( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_settings" not in self._stubs: - self._stubs["update_settings"] = self.grpc_channel.unary_unary( + self._stubs["update_settings"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1055,15 +1325,334 @@ def copy_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "copy_log_entries" not in self._stubs: - self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["copy_log_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_buckets: self._wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: self._wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket_async: self._wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: self._wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: self._wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: self._wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: self._wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: self._wrap_method( + self.undelete_bucket, + 
default_timeout=None, + client_info=client_info, + ), + self.list_views: self._wrap_method( + self.list_views, + default_timeout=None, + client_info=client_info, + ), + self.get_view: self._wrap_method( + self.get_view, + default_timeout=None, + client_info=client_info, + ), + self.create_view: self._wrap_method( + self.create_view, + default_timeout=None, + client_info=client_info, + ), + self.update_view: self._wrap_method( + self.update_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_view: self._wrap_method( + self.delete_view, + default_timeout=None, + client_info=client_info, + ), + self.list_sinks: self._wrap_method( + self.list_sinks, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: self._wrap_method( + self.get_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: self._wrap_method( + self.create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self.update_sink: self._wrap_method( + self.update_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: self._wrap_method( + self.delete_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_link: self._wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: self._wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: self._wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: self._wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), + self.list_exclusions: self._wrap_method( + self.list_exclusions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: self._wrap_method( + self.get_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: self._wrap_method( + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.update_exclusion: self._wrap_method( + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.delete_exclusion: self._wrap_method( + self.delete_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: self._wrap_method( + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_cmek_settings: self._wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.get_settings: self._wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_settings: self._wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), + self.copy_log_entries: self._wrap_method( + self.copy_log_entries, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] __all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 41b2a2d15..41c0dc4fa 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index bd8ba63f0..8de507845 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict -import functools import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -36,31 +37,47 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf + try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" _client: LoggingServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = LoggingServiceV2Client._DEFAULT_UNIVERSE log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) @@ -167,19 +184,40 @@ def transport(self) -> LoggingServiceV2Transport: """ return self._client.transport - get_transport_class = functools.partial( - type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client) - ) + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = LoggingServiceV2Client.get_transport_class def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the logging service v2 client. + """Instantiates the logging service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -187,26 +225,43 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.LoggingServiceV2Transport]): The - transport to use. 
If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -218,6 +273,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + }, + ) + async def delete_log( self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, @@ -225,7 +302,7 @@ async def delete_log( log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -277,23 +354,31 @@ async def sample_delete_log(): This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [log_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging.DeleteLogRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -302,22 +387,9 @@ async def sample_delete_log(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log + ] # Certain fields should be provided within the metadata header; # add these here. @@ -325,6 +397,9 @@ async def sample_delete_log(): gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -343,7 +418,7 @@ async def write_log_entries( entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -471,27 +546,35 @@ async def sample_write_log_entries(): This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name, resource, labels, entries]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging.WriteLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -507,22 +590,12 @@ async def sample_write_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write_log_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.write_log_entries + ] + + # Validate the universe domain. + self._client._validate_universe_domain() # Send the request. response = await rpc( @@ -544,7 +617,7 @@ async def list_log_entries( order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -598,21 +671,19 @@ async def sample_list_log_entries(): - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. 
Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -630,11 +701,13 @@ async def sample_list_log_entries(): This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: @@ -645,16 +718,22 @@ async def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [resource_names, filter, order_by] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging.ListLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -667,22 +746,12 @@ async def sample_list_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_entries + ] + + # Validate the universe domain. + self._client._validate_universe_domain() # Send the request. 
response = await rpc( @@ -698,6 +767,8 @@ async def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -712,7 +783,7 @@ async def list_monitored_resource_descriptors( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -747,11 +818,13 @@ async def sample_list_monitored_resource_descriptors(): request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: @@ -763,26 +836,19 @@ async def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_monitored_resource_descriptors + ] + + # Validate the universe domain. + self._client._validate_universe_domain() # Send the request. response = await rpc( @@ -798,6 +864,8 @@ async def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -811,7 +879,7 @@ async def list_logs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are @@ -848,7 +916,7 @@ async def sample_list_logs(): request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): The request object. The parameters to ListLogs. parent (:class:`str`): - Required. The resource name that owns the logs: + Required. 
The resource name to list logs for: - ``projects/[PROJECT_ID]`` - ``organizations/[ORGANIZATION_ID]`` @@ -858,31 +926,40 @@ async def sample_list_logs(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging.ListLogsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -891,22 +968,9 @@ async def sample_list_logs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_logs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_logs + ] # Certain fields should be provided within the metadata header; # add these here. @@ -914,6 +978,9 @@ async def sample_list_logs(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -928,6 +995,8 @@ async def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -940,7 +1009,7 @@ def tail_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading @@ -986,11 +1055,13 @@ def request_generator(): Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to ``TailLogEntries``. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -999,22 +1070,12 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.tail_log_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.tail_log_entries + ] + + # Validate the universe domain. + self._client._validate_universe_domain() # Send the request. response = rpc( @@ -1027,7 +1088,172 @@ def request_generator(): # Done; return the response. return response - async def __aenter__(self): + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. 
If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): @@ -1038,5 +1264,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("LoggingServiceV2AsyncClient",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 7949a41a9..22318f07a 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,10 +14,14 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -30,6 +34,7 @@ Union, cast, ) +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -42,16 +47,27 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -126,11 +142,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -276,7 +296,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -306,6 +326,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -339,11 +364,180 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = LoggingServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -355,25 +549,37 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -384,17 +590,38 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = LoggingServiceV2Client._read_environment_variables() + self._client_cert_source = LoggingServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = LoggingServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + 
self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -403,20 +630,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, LoggingServiceV2Transport): + transport_provided = isinstance(transport, LoggingServiceV2Transport) + if transport_provided: # transport is a LoggingServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(LoggingServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or LoggingServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -426,19 +666,50 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) - self._transport = Transport( + transport_init: Union[ + Type[LoggingServiceV2Transport], + Callable[..., LoggingServiceV2Transport], + ] = ( + LoggingServiceV2Client.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LoggingServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2Client`.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), 
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + }, + ) + def delete_log( self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, @@ -446,7 +717,7 @@ def delete_log( log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -501,23 +772,26 @@ def sample_delete_log(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [log_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.DeleteLogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the @@ -535,6 +809,9 @@ def sample_delete_log(): gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -553,7 +830,7 @@ def write_log_entries( entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -684,27 +961,30 @@ def sample_write_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name, resource, labels, entries]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.WriteLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -722,6 +1002,9 @@ def sample_write_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -742,7 +1025,7 @@ def list_log_entries( order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -796,21 +1079,19 @@ def sample_list_log_entries(): - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -831,8 +1112,10 @@ def sample_list_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: @@ -843,19 +1126,20 @@ def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource_names, filter, order_by] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -871,6 +1155,9 @@ def sample_list_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -885,6 +1172,8 @@ def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -899,7 +1188,7 @@ def list_monitored_resource_descriptors( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -937,8 +1226,10 @@ def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: @@ -950,10 +1241,8 @@ def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListMonitoredResourceDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -963,6 +1252,9 @@ def sample_list_monitored_resource_descriptors(): self._transport.list_monitored_resource_descriptors ] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -977,6 +1269,8 @@ def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -990,7 +1284,7 @@ def list_logs( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are @@ -1027,7 +1321,7 @@ def sample_list_logs(): request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - ``projects/[PROJECT_ID]`` - ``organizations/[ORGANIZATION_ID]`` @@ -1040,31 +1334,35 @@ def sample_list_logs(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: Result returned from ListLogs. 
+ Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1082,6 +1380,9 @@ def sample_list_logs(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1096,6 +1397,8 @@ def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1108,7 +1411,7 @@ def tail_log_entries( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. 
Until the stream is terminated, it will continue reading @@ -1157,8 +1460,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -1169,6 +1474,9 @@ def request_generator(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( requests, @@ -1193,10 +1501,185 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("LoggingServiceV2Client",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py index e1e7188cd..f19ad6304 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging @@ -53,7 +66,9 @@ def __init__( request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -64,12 +79,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -80,7 +102,12 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[log_entry.LogEntry]: @@ -115,7 +142,9 @@ def __init__( request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -126,12 +155,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -142,7 +178,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: @@ -181,7 +222,9 @@ def __init__( request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -192,12 +235,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -208,7 +258,12 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: @@ -245,7 +300,9 @@ def __init__( request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -256,12 +313,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -274,7 +338,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__( @@ -315,7 +384,9 @@ def __init__( request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -326,12 +397,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -342,7 +420,12 @@ def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -377,7 +460,9 @@ def __init__( request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -388,12 +473,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -404,7 +496,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[str]: diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst b/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst new file mode 100644 index 000000000..897a4c7bf --- /dev/null +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`LoggingServiceV2Transport` is the ABC for all transports. +- public child `LoggingServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `LoggingServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseLoggingServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `LoggingServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 4e0163fe6..48f0b711c 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 22665b15e..7f7cfe9a2 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,14 +25,19 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -64,7 +69,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. 
+ The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -89,6 +94,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -101,7 +108,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) @@ -127,6 +134,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -226,6 +237,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -299,6 +325,33 @@ def tail_log_entries( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a6878b6fa..7bffe25b6 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,13 +24,91 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": 
str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): """gRPC backend transport for LoggingServiceV2. @@ -51,7 +132,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -65,20 +146,23 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -88,11 +172,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,9 +202,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -159,7 +244,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -175,7 +262,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -251,7 +343,7 @@ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_log" not in self._stubs: - self._stubs["delete_log"] = self.grpc_channel.unary_unary( + self._stubs["delete_log"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -283,7 +375,7 @@ def write_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_log_entries" not in self._stubs: - self._stubs["write_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -312,7 +404,7 @@ def list_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_log_entries" not in self._stubs: - self._stubs["list_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -345,7 +437,7 @@ def list_monitored_resource_descriptors( if "list_monitored_resource_descriptors" not in self._stubs: self._stubs[ "list_monitored_resource_descriptors" - ] = self.grpc_channel.unary_unary( + ] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -373,7 +465,7 @@ def list_logs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_logs" not in self._stubs: - self._stubs["list_logs"] = self.grpc_channel.unary_unary( + self._stubs["list_logs"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -401,7 +493,7 @@ def tail_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "tail_log_entries" not in self._stubs: - self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -409,7 +501,60 @@ def tail_log_entries( return self._stubs["tail_log_entries"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] @property def kind(self) -> str: diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 2e8f76017..f73ac1150 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,22 +13,108 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + 
} + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): """gRPC AsyncIO backend transport for LoggingServiceV2. @@ -66,7 +152,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -96,7 +181,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -110,21 +195,24 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -134,11 +222,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -164,9 +252,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -204,7 +293,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -220,7 +311,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -256,7 +353,7 @@ def delete_log( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_log" not in self._stubs: - self._stubs["delete_log"] = self.grpc_channel.unary_unary( + self._stubs["delete_log"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -290,7 +387,7 @@ def write_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_log_entries" not in self._stubs: - self._stubs["write_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -321,7 +418,7 @@ def list_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_log_entries" not in self._stubs: - self._stubs["list_log_entries"] = self.grpc_channel.unary_unary( + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -354,7 +451,7 @@ def list_monitored_resource_descriptors( if "list_monitored_resource_descriptors" not in self._stubs: self._stubs[ "list_monitored_resource_descriptors" - ] = self.grpc_channel.unary_unary( + ] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -382,7 +479,7 @@ def list_logs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_logs" not in self._stubs: - self._stubs["list_logs"] = self.grpc_channel.unary_unary( + self._stubs["list_logs"] = self._logged_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -412,15 +509,193 @@ def tail_log_entries( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "tail_log_entries" not in self._stubs: - self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream( + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) return self._stubs["tail_log_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.delete_log: self._wrap_method( + self.delete_log, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: self._wrap_method( + self.write_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: self._wrap_method( + self.list_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: self._wrap_method( + self.list_monitored_resource_descriptors, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: self._wrap_method( + self.list_logs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.tail_log_entries: self._wrap_method( + self.tail_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] __all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index fc0615f19..41a1ef4a6 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index bcffd416c..129fc055b 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict -import functools import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -33,32 +34,48 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf + try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class MetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" _client: MetricsServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = MetricsServiceV2Client._DEFAULT_UNIVERSE log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) @@ -165,19 +182,40 @@ def transport(self) -> MetricsServiceV2Transport: """ return self._client.transport - get_transport_class = functools.partial( - type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client) - ) + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = MetricsServiceV2Client.get_transport_class def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the metrics service v2 client. + """Instantiates the metrics service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -185,26 +223,43 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ~.MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. 
@@ -216,6 +271,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + }, + ) + async def list_log_metrics( self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, @@ -223,7 +300,7 @@ async def list_log_metrics( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -268,31 +345,40 @@ async def sample_list_log_metrics(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_metrics.ListLogMetricsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -301,22 +387,9 @@ async def sample_list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_metrics, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_metrics + ] # Certain fields should be provided within the metadata header; # add these here. @@ -324,6 +397,9 @@ async def sample_list_log_metrics(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -338,6 +414,8 @@ async def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -351,7 +429,7 @@ async def get_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -394,11 +472,13 @@ async def sample_get_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -406,6 +486,7 @@ async def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -416,16 +497,22 @@ async def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_metrics.GetLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -434,22 +521,9 @@ async def sample_get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_log_metric, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -459,6 +533,9 @@ async def sample_get_log_metric(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -478,7 +555,7 @@ async def create_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -537,11 +614,13 @@ async def sample_create_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogMetric: @@ -549,6 +628,7 @@ async def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -559,16 +639,22 @@ async def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_metrics.CreateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -579,11 +665,9 @@ async def sample_create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_log_metric, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -591,6 +675,9 @@ async def sample_create_log_metric(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -610,7 +697,7 @@ async def update_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -668,11 +755,13 @@ async def sample_update_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -680,6 +769,7 @@ async def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -690,16 +780,22 @@ async def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name, metric]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [metric_name, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = logging_metrics.UpdateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -710,22 +806,9 @@ async def sample_update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_log_metric, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -735,6 +818,9 @@ async def sample_update_log_metric(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -753,7 +839,7 @@ async def delete_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -793,23 +879,31 @@ async def sample_delete_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) - request = logging_metrics.DeleteLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -818,22 +912,9 @@ async def sample_delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log_metric, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -843,6 +924,174 @@ async def sample_delete_log_metric(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc( request, @@ -851,7 +1100,7 @@ async def sample_delete_log_metric(): metadata=metadata, ) - async def __aenter__(self): + async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): @@ -862,5 +1111,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("MetricsServiceV2AsyncClient",) diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index a75c1e935..f2f0f8ce1 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,10 +14,14 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -28,6 +32,7 @@ Union, cast, ) +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -40,16 +45,27 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport @@ -125,11 +141,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -275,7 +295,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -305,6 +325,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -338,11 +363,180 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MetricsServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -354,25 +548,37 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -383,17 +589,38 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MetricsServiceV2Client._read_environment_variables() + self._client_cert_source = MetricsServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MetricsServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + 
self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -402,20 +629,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, MetricsServiceV2Transport): + transport_provided = isinstance(transport, MetricsServiceV2Transport) + if transport_provided: # transport is a MetricsServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(MetricsServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or MetricsServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -425,19 +665,50 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) - self._transport = Transport( + transport_init: Union[ + Type[MetricsServiceV2Transport], + Callable[..., MetricsServiceV2Transport], + ] = ( + MetricsServiceV2Client.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetricsServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2Client`.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), 
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + }, + ) + def list_log_metrics( self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, @@ -445,7 +716,7 @@ def list_log_metrics( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -493,31 +764,35 @@ def sample_list_log_metrics(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.ListLogMetricsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -535,6 +810,9 @@ def sample_list_log_metrics(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -549,6 +827,8 @@ def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -562,7 +842,7 @@ def get_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -608,8 +888,10 @@ def sample_get_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -617,6 +899,7 @@ def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -627,19 +910,20 @@ def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.GetLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -659,6 +943,9 @@ def sample_get_log_metric(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -678,7 +965,7 @@ def create_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -740,8 +1027,10 @@ def sample_create_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -749,6 +1038,7 @@ def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -759,19 +1049,20 @@ def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.CreateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -791,6 +1082,9 @@ def sample_create_log_metric(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -810,7 +1104,7 @@ def update_log_metric( metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -871,8 +1165,10 @@ def sample_update_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -880,6 +1176,7 @@ def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. 
+ Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -890,19 +1187,20 @@ def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [metric_name, metric] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.UpdateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -924,6 +1222,9 @@ def sample_update_log_metric(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -942,7 +1243,7 @@ def delete_log_metric( metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. 
@@ -985,23 +1286,26 @@ def sample_delete_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name]) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [metric_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.DeleteLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1021,6 +1325,9 @@ def sample_delete_log_metric(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
rpc( request, @@ -1042,10 +1349,185 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("MetricsServiceV2Client",) diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 2c647cda1..75fc998a2 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.logging_v2.types import logging_metrics @@ -51,7 +64,9 @@ def __init__( request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -62,12 +77,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +100,12 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_metrics.LogMetric]: @@ -113,7 +140,9 @@ def __init__( request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -124,12 +153,19 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +176,12 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst b/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst new file mode 100644 index 000000000..00dffa25f --- /dev/null +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MetricsServiceV2Transport` is the ABC for all transports. +- public child `MetricsServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MetricsServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMetricsServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MetricsServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index e28f020df..4975feb99 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 6e0f1698e..22bc19736 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -25,14 +25,19 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -64,7 +69,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -89,6 +94,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -101,7 +108,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) @@ -127,6 +134,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -199,6 +210,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -258,6 +284,33 @@ def delete_log_metric( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 7eac78848..fe0943a94 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,13 +24,91 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": 
str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): """gRPC backend transport for MetricsServiceV2. @@ -51,7 +132,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -65,20 +146,23 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -88,11 +172,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,9 +202,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -159,7 +244,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -175,7 +262,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -251,7 +343,7 @@ def list_log_metrics( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_log_metrics" not in self._stubs: - self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary( + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -277,7 +369,7 @@ def get_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_log_metric" not in self._stubs: - self._stubs["get_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -303,7 +395,7 @@ def create_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_log_metric" not in self._stubs: - self._stubs["create_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -329,7 +421,7 @@ def update_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_log_metric" not in self._stubs: - self._stubs["update_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -355,7 +447,7 @@ def delete_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_log_metric" not in self._stubs: - self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -363,7 +455,60 @@ def delete_log_metric( return self._stubs["delete_log_metric"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] @property def kind(self) -> str: diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 99764a592..01aa05771 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,22 +13,108 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in 
request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): """gRPC AsyncIO backend transport for MetricsServiceV2. @@ -66,7 +152,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -96,7 +181,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -110,21 +195,24 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -134,11 +222,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -164,9 +252,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -204,7 +293,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -220,7 +311,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -255,7 +352,7 @@ def list_log_metrics( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_log_metrics" not in self._stubs: - self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary( + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -283,7 +380,7 @@ def get_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_log_metric" not in self._stubs: - self._stubs["get_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -311,7 +408,7 @@ def create_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_log_metric" not in self._stubs: - self._stubs["create_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -339,7 +436,7 @@ def update_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_log_metric" not in self._stubs: - self._stubs["update_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -365,15 +462,166 @@ def delete_log_metric( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_log_metric" not in self._stubs: - self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_log_metrics: self._wrap_method( + self.list_log_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_log_metric: self._wrap_method( + self.get_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_log_metric: self._wrap_method( + self.create_log_metric, + default_timeout=60.0, + client_info=client_info, + ), + self.update_log_metric: self._wrap_method( + self.update_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_log_metric: self._wrap_method( + self.delete_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + 
maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] __all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index 43b5674dd..efea79307 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -34,33 +34,44 @@ WriteLogEntriesResponse, ) from .logging_config import ( + BigQueryDataset, BigQueryOptions, + BucketMetadata, CmekSettings, CopyLogEntriesMetadata, CopyLogEntriesRequest, CopyLogEntriesResponse, CreateBucketRequest, CreateExclusionRequest, + CreateLinkRequest, CreateSinkRequest, CreateViewRequest, DeleteBucketRequest, DeleteExclusionRequest, + DeleteLinkRequest, DeleteSinkRequest, DeleteViewRequest, GetBucketRequest, GetCmekSettingsRequest, GetExclusionRequest, + GetLinkRequest, GetSettingsRequest, GetSinkRequest, GetViewRequest, + IndexConfig, + Link, + LinkMetadata, ListBucketsRequest, ListBucketsResponse, ListExclusionsRequest, ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, ListSinksRequest, ListSinksResponse, ListViewsRequest, ListViewsResponse, + LocationMetadata, LogBucket, LogExclusion, LogSink, @@ -73,6 +84,7 @@ UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, + IndexType, LifecycleState, OperationState, ) @@ -103,33 +115,44 @@ "WriteLogEntriesPartialErrors", "WriteLogEntriesRequest", "WriteLogEntriesResponse", + "BigQueryDataset", "BigQueryOptions", + "BucketMetadata", "CmekSettings", "CopyLogEntriesMetadata", "CopyLogEntriesRequest", "CopyLogEntriesResponse", "CreateBucketRequest", "CreateExclusionRequest", + "CreateLinkRequest", "CreateSinkRequest", "CreateViewRequest", "DeleteBucketRequest", "DeleteExclusionRequest", + "DeleteLinkRequest", "DeleteSinkRequest", "DeleteViewRequest", "GetBucketRequest", "GetCmekSettingsRequest", "GetExclusionRequest", + "GetLinkRequest", "GetSettingsRequest", "GetSinkRequest", "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", "ListBucketsRequest", "ListBucketsResponse", "ListExclusionsRequest", "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", "ListSinksRequest", "ListSinksResponse", "ListViewsRequest", "ListViewsResponse", + "LocationMetadata", "LogBucket", "LogExclusion", "LogSink", @@ -142,6 +165,7 @@ "UpdateSettingsRequest", 
"UpdateSinkRequest", "UpdateViewRequest", + "IndexType", "LifecycleState", "OperationState", "CreateLogMetricRequest", diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index 0536e4db5..e52f3085d 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -92,6 +92,7 @@ class LogEntry(proto.Message): protocol buffer. Some Google Cloud Platform services use this field for their log entry payloads. + The following protocol buffer types are supported; user-defined types are not supported: @@ -169,18 +170,54 @@ class LogEntry(proto.Message): Optional. Information about an operation associated with the log entry, if applicable. trace (str): - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: - ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + Optional. The REST resource name of the trace being written + to `Cloud Trace `__ in + association with this log entry. For example, if your trace + data is stored in the Cloud project "my-trace-project" and + if the service that is creating the log entry receives a + trace header that includes the trace ID "12345", then the + service should use + "projects/my-tracing-project/traces/12345". + + The ``trace`` field provides the link between logs and + traces. By using this field, you can navigate from a log + entry to a trace. span_id (str): - Optional. The span ID within the trace associated with the - log entry. 
- - For Trace spans, this is the same format that the Trace API - v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as ``000000000000004a``. + Optional. The ID of the `Cloud + Trace `__ span associated + with the current operation in which the log is being + written. For example, if a span has the REST resource name + of + "projects/some-project/traces/some-trace/spans/some-span-id", + then the ``span_id`` field is "some-span-id". + + A + `Span `__ + represents a single operation within a trace. Whereas a + trace may involve multiple different microservices running + on multiple different machines, a span generally corresponds + to a single logical operation being performed in a single + instance of a microservice on one specific machine. Spans + are the nodes within the tree that is a trace. + + Applications that are `instrumented for + tracing `__ will + generally assign a new, unique span ID on each incoming + request. It is also common to create and record additional + spans corresponding to internal processing elements as well + as issuing requests to dependencies. + + The span ID is expected to be a 16-character, hexadecimal + encoding of an 8-byte array and should not be zero. It + should be unique within the trace and should, ideally, be + generated in a manner that is uniformly random. + + Example values: + + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. 
diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index 4d27176d1..5b46a15e1 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -144,13 +144,15 @@ class WriteLogEntriesRequest(proto.Message): entries in this list, rather than calling this method for each individual log entry. partial_success (bool): - Optional. Whether valid entries should be written even if - some other entries fail due to INVALID_ARGUMENT or - PERMISSION_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. + Optional. Whether a batch's valid entries should be written + even if some other entry failed due to a permanent error + such as INVALID_ARGUMENT or PERMISSION_DENIED. If any entry + failed, then the response status is the response status of + one of the failed entries. The response will include error + details in ``WriteLogEntriesPartialErrors.log_entry_errors`` + keyed by the entries' zero-based index in the ``entries``. + Failed requests for which no entries are written will not + include per-entry errors. dry_run (bool): Optional. If true, the request should expect normal response, but the entries won't be @@ -235,17 +237,15 @@ class ListLogEntriesRequest(proto.Message): - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to - this list. + this list. A maximum of 100 resources may be specified in a + single request. filter (str): - Optional. 
A filter that chooses which log entries to return. - See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not listed in ``resource_names`` will cause the filter to - return no results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. order_by (str): Optional. How the results should be sorted. Presently, the only permitted values are ``"timestamp asc"`` (default) and @@ -393,25 +393,14 @@ class ListLogsRequest(proto.Message): Attributes: parent (str): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - ``projects/[PROJECT_ID]`` - ``organizations/[ORGANIZATION_ID]`` - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - ``folders/[FOLDER_ID]`` - page_size (int): - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more - results might be available. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``pageToken`` must be the value of ``nextPageToken`` from - the previous response. The values of other method parameters - should be identical to those in the previous call. resource_names (MutableSequence[str]): - Optional. The resource name that owns the logs: + Optional. 
List of resource names to list logs for: - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` @@ -424,12 +413,30 @@ class ListLogsRequest(proto.Message): - ``organizations/[ORGANIZATION_ID]`` - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - ``folders/[FOLDER_ID]`` + + The resource name in the ``parent`` field is added to this + list. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. """ parent: str = proto.Field( proto.STRING, number=1, ) + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) page_size: int = proto.Field( proto.INT32, number=2, @@ -438,10 +445,6 @@ class ListLogsRequest(proto.Message): proto.STRING, number=3, ) - resource_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) class ListLogsResponse(proto.Message): @@ -493,15 +496,12 @@ class TailLogEntriesRequest(proto.Message): - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Filters `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. 
Referencing a parent resource that is - not in ``resource_names`` will cause the filter to return no - results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. buffer_window (google.protobuf.duration_pb2.Duration): Optional. The amount of time to buffer log entries at the server before being returned to diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 9ed3a767c..6ed09222a 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,11 +26,15 @@ __protobuf__ = proto.module( package="google.logging.v2", manifest={ - "LifecycleState", "OperationState", + "LifecycleState", + "IndexType", + "IndexConfig", "LogBucket", "LogView", "LogSink", + "BigQueryDataset", + "Link", "BigQueryOptions", "ListBucketsRequest", "ListBucketsResponse", @@ -51,6 +55,11 @@ "CreateSinkRequest", "UpdateSinkRequest", "DeleteSinkRequest", + "CreateLinkRequest", + "DeleteLinkRequest", + "ListLinksRequest", + "ListLinksResponse", + "GetLinkRequest", "LogExclusion", "ListExclusionsRequest", "ListExclusionsResponse", @@ -67,29 +76,13 @@ "CopyLogEntriesRequest", "CopyLogEntriesMetadata", "CopyLogEntriesResponse", + "BucketMetadata", + "LinkMetadata", + "LocationMetadata", }, ) -class LifecycleState(proto.Enum): - r"""LogBucket lifecycle states. - - Values: - LIFECYCLE_STATE_UNSPECIFIED (0): - Unspecified state. 
This is only used/useful - for distinguishing unset values. - ACTIVE (1): - The normal and active state. - DELETE_REQUESTED (2): - The resource has been marked for deletion by - the user. For some resources (e.g. buckets), - this can be reversed by an un-delete operation. - """ - LIFECYCLE_STATE_UNSPECIFIED = 0 - ACTIVE = 1 - DELETE_REQUESTED = 2 - - class OperationState(proto.Enum): r"""List of different operation states. High level state of the operation. This is used to report the @@ -123,6 +116,93 @@ class OperationState(proto.Enum): OPERATION_STATE_CANCELLED = 6 +class LifecycleState(proto.Enum): + r"""LogBucket lifecycle states. + + Values: + LIFECYCLE_STATE_UNSPECIFIED (0): + Unspecified state. This is only used/useful + for distinguishing unset values. + ACTIVE (1): + The normal and active state. + DELETE_REQUESTED (2): + The resource has been marked for deletion by + the user. For some resources (e.g. buckets), + this can be reversed by an un-delete operation. + UPDATING (3): + The resource has been marked for an update by + the user. It will remain in this state until the + update is complete. + CREATING (4): + The resource has been marked for creation by + the user. It will remain in this state until the + creation is complete. + FAILED (5): + The resource is in an INTERNAL error state. + """ + LIFECYCLE_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + DELETE_REQUESTED = 2 + UPDATING = 3 + CREATING = 4 + FAILED = 5 + + +class IndexType(proto.Enum): + r"""IndexType is used for custom indexing. It describes the type + of an indexed field. + + Values: + INDEX_TYPE_UNSPECIFIED (0): + The index's type is unspecified. + INDEX_TYPE_STRING (1): + The index is a string-type index. + INDEX_TYPE_INTEGER (2): + The index is a integer-type index. + """ + INDEX_TYPE_UNSPECIFIED = 0 + INDEX_TYPE_STRING = 1 + INDEX_TYPE_INTEGER = 2 + + +class IndexConfig(proto.Message): + r"""Configuration for an indexed field. + + Attributes: + field_path (str): + Required. 
The LogEntry field path to index. + + Note that some paths are automatically indexed, and other + paths are not eligible for indexing. See `indexing + documentation `__ + for details. + + For example: ``jsonPayload.request.status`` + type_ (google.cloud.logging_v2.types.IndexType): + Required. The type of data in this index. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the index was + last modified. + This is used to return the timestamp, and will + be ignored if supplied during update. + """ + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + type_: "IndexType" = proto.Field( + proto.ENUM, + number=2, + enum="IndexType", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class LogBucket(proto.Message): r"""Describes a repository in which log entries are stored. @@ -160,11 +240,17 @@ class LogBucket(proto.Message): days will be used. locked (bool): Whether the bucket is locked. + The retention period on a locked bucket cannot be changed. Locked buckets may only be deleted if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. + analytics_enabled (bool): + Whether log analytics is enabled for this + bucket. + Once enabled, log analytics features cannot be + disabled. restricted_fields (MutableSequence[str]): Log entry field paths that are denied access in this bucket. @@ -175,6 +261,9 @@ class LogBucket(proto.Message): Restricting a repeated field will restrict all values. Adding a parent will block all child fields. (e.g. ``foo.bar`` will block ``foo.bar.baz``) + index_configs (MutableSequence[google.cloud.logging_v2.types.IndexConfig]): + A list of indexed fields and related + configuration data. cmek_settings (google.cloud.logging_v2.types.CmekSettings): The CMEK settings of the log bucket. 
If present, new log entries written to this log @@ -216,10 +305,19 @@ class LogBucket(proto.Message): number=12, enum="LifecycleState", ) + analytics_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) restricted_fields: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=15, ) + index_configs: MutableSequence["IndexConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="IndexConfig", + ) cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, @@ -332,6 +430,7 @@ class LogSink(proto.Message): ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` description (str): Optional. A description of this sink. + The maximum length of the description is 8000 characters. disabled (bool): @@ -348,7 +447,9 @@ class LogSink(proto.Message): writer_identity (str): Output only. An IAM identity—a service account or group—under which Cloud Logging writes the exported log - entries to the sink's destination. This field is set by + entries to the sink's destination. This field is either set + by specifying ``custom_writer_identity`` or set + automatically by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] @@ -363,7 +464,7 @@ class LogSink(proto.Message): the appropriate IAM roles to assign to the identity. Sinks that have a destination that is a log bucket in the - same project as the sink do not have a writer_identity and + same project as the sink cannot have a writer_identity and no additional permissions are required. include_children (bool): Optional. This field applies only to sinks owned by @@ -472,6 +573,90 @@ class VersionFormat(proto.Enum): ) +class BigQueryDataset(proto.Message): + r"""Describes a BigQuery dataset that was created by a link. + + Attributes: + dataset_id (str): + Output only. The full resource name of the BigQuery dataset. 
+ The DATASET_ID will match the ID of the link, so the link + must match the naming restrictions of BigQuery datasets + (alphanumeric characters and underscores only). + + The dataset will have a resource path of + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET_ID]". + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Link(proto.Message): + r"""Describes a link connected to an analytics enabled bucket. + + Attributes: + name (str): + The resource name of the link. The name can have up to 100 + characters. A valid link id (at the end of the link name) + must only have alphanumeric characters and underscores + within it. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + For example: + + \`projects/my-project/locations/global/buckets/my-bucket/links/my_link + description (str): + Describes this link. + + The maximum length of the description is 8000 + characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + link. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The resource lifecycle state. + bigquery_dataset (google.cloud.logging_v2.types.BigQueryDataset): + The information of a BigQuery Dataset. When a + link is created, a BigQuery dataset is created + along with it, in the same project as the + LogBucket it's linked to. This dataset will also + have BigQuery Views corresponding to the + LogViews in the bucket. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + lifecycle_state: "LifecycleState" = proto.Field( + proto.ENUM, + number=4, + enum="LifecycleState", + ) + bigquery_dataset: "BigQueryDataset" = proto.Field( + proto.MESSAGE, + number=5, + message="BigQueryDataset", + ) + + class BigQueryOptions(proto.Message): r"""Options that change functionality of a sink exporting data to BigQuery. @@ -827,7 +1012,10 @@ class CreateViewRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` view_id (str): - Required. The id to use for this view. + Required. A client-assigned identifier such as + ``"my-view"``. Identifiers are limited to 100 characters and + can include only letters, digits, underscores, hyphens, and + periods. view (google.cloud.logging_v2.types.LogView): Required. The new view. """ @@ -1186,6 +1374,144 @@ class DeleteSinkRequest(proto.Message): ) +class CreateLinkRequest(proto.Message): + r"""The parameters to CreateLink. + + Attributes: + parent (str): + Required. The full resource name of the bucket to create a + link for. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + link (google.cloud.logging_v2.types.Link): + Required. The new link. + link_id (str): + Required. The ID to use for the link. The link_id can have + up to 100 characters. A valid link_id must only have + alphanumeric characters and underscores within it. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + link: "Link" = proto.Field( + proto.MESSAGE, + number=2, + message="Link", + ) + link_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteLinkRequest(proto.Message): + r"""The parameters to DeleteLink. + + Attributes: + name (str): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLinksRequest(proto.Message): + r"""The parameters to ListLinks. + + Attributes: + parent (str): + Required. The parent resource whose links are to be listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. + page_size (int): + Optional. The maximum number of results to + return from this request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLinksResponse(proto.Message): + r"""The response from ListLinks. + + Attributes: + links (MutableSequence[google.cloud.logging_v2.types.Link]): + A list of links. 
+ next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call the same method again using the + value of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + links: MutableSequence["Link"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Link", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLinkRequest(proto.Message): + r"""The parameters to GetLink. + + Attributes: + name (str): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class LogExclusion(proto.Message): r"""Specifies a set of log entries that are filtered out by a sink. If your Google Cloud resource receives a large volume of log entries, @@ -1611,6 +1937,27 @@ class CmekSettings(proto.Message): See `Enabling CMEK for Log Router `__ for more information. + kms_key_version_name (str): + The CryptoKeyVersion resource name for the configured Cloud + KMS key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]/cryptoKeyVersions/[VERSION]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key/cryptoKeyVersions/1"`` + + This is a read-only field used to convey the specific + configured CryptoKeyVersion of ``kms_key`` that has been + configured. It will be populated in cases where the CMEK + settings are bound to a single key version. 
+ + If this field is populated, the ``kms_key`` is tied to a + specific CryptoKeyVersion. service_account_id (str): Output only. The service account that will be used by the Log Router to access your Cloud KMS key. @@ -1635,6 +1982,10 @@ class CmekSettings(proto.Message): proto.STRING, number=2, ) + kms_key_version_name: str = proto.Field( + proto.STRING, + number=4, + ) service_account_id: str = proto.Field( proto.STRING, number=3, @@ -1937,4 +2288,131 @@ class CopyLogEntriesResponse(proto.Message): ) +class BucketMetadata(proto.Message): + r"""Metadata for LongRunningUpdateBucket Operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_bucket_request (google.cloud.logging_v2.types.CreateBucketRequest): + LongRunningCreateBucket RPC request. + + This field is a member of `oneof`_ ``request``. + update_bucket_request (google.cloud.logging_v2.types.UpdateBucketRequest): + LongRunningUpdateBucket RPC request. + + This field is a member of `oneof`_ ``request``. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: "OperationState" = proto.Field( + proto.ENUM, + number=3, + enum="OperationState", + ) + create_bucket_request: "CreateBucketRequest" = proto.Field( + proto.MESSAGE, + number=4, + oneof="request", + message="CreateBucketRequest", + ) + update_bucket_request: "UpdateBucketRequest" = proto.Field( + proto.MESSAGE, + number=5, + oneof="request", + message="UpdateBucketRequest", + ) + + +class LinkMetadata(proto.Message): + r"""Metadata for long running Link operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_link_request (google.cloud.logging_v2.types.CreateLinkRequest): + CreateLink RPC request. + + This field is a member of `oneof`_ ``request``. + delete_link_request (google.cloud.logging_v2.types.DeleteLinkRequest): + DeleteLink RPC request. + + This field is a member of `oneof`_ ``request``. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: "OperationState" = proto.Field( + proto.ENUM, + number=3, + enum="OperationState", + ) + create_link_request: "CreateLinkRequest" = proto.Field( + proto.MESSAGE, + number=4, + oneof="request", + message="CreateLinkRequest", + ) + delete_link_request: "DeleteLinkRequest" = proto.Field( + proto.MESSAGE, + number=5, + oneof="request", + message="DeleteLinkRequest", + ) + + +class LocationMetadata(proto.Message): + r"""Cloud Logging specific location metadata. + + Attributes: + log_analytics_enabled (bool): + Indicates whether or not Log Analytics + features are supported in the given location. + """ + + log_analytics_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index 0d31860a0..dd90dd3cb 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -42,6 +42,7 @@ class LogMetric(proto.Message): r"""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. 
The distribution records the statistics of the extracted values along with an @@ -79,6 +80,17 @@ class LogMetric(proto.Message): "resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters. + bucket_name (str): + Optional. The resource name of the Log Bucket that owns the + Log Metric. Only Log Buckets in projects are supported. The + bucket has to be in the same project as the metric. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + If empty, then the Log Metric is considered a non-Bucket Log + Metric. disabled (bool): Optional. If set to True, then this metric is disabled and it does not generate any points. @@ -113,7 +125,7 @@ class LogMetric(proto.Message): distribution logs-based metric to extract the values to record from a log entry. Two functions are supported for value extraction: ``EXTRACT(field)`` or - ``REGEXP_EXTRACT(field, regex)``. The argument are: + ``REGEXP_EXTRACT(field, regex)``. The arguments are: 1. field: The name of the log entry field from which the value is to be extracted. @@ -142,7 +154,7 @@ class LogMetric(proto.Message): ``value_extractor`` field. The extracted value is converted to the type defined in the - label descriptor. If the either the extraction or the type + label descriptor. If either the extraction or the type conversion fails, the label will have a default value. The default value for a string label is an empty string, for an integer label its 0, and for a boolean label its ``false``. 
@@ -193,6 +205,10 @@ class ApiVersion(proto.Enum): proto.STRING, number=3, ) + bucket_name: str = proto.Field( + proto.STRING, + number=13, + ) disabled: bool = proto.Field( proto.BOOL, number=12, diff --git a/noxfile.py b/noxfile.py index 7ebe500a3..b75e78ac3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,22 +17,32 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -40,36 +50,36 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "flask", "webob", "django", ] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ 
+SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "google-cloud-bigquery", "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", + "opentelemetry-sdk", ] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() -# 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", "system", @@ -78,6 +88,8 @@ "lint_setup_py", "blacken", "docs", + "docfx", + "format", ] # Error if a python version is missing @@ -165,14 +177,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( "py.test", @@ -186,17 +212,13 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. # See https://github.com/grpc/grpc/issues/32163 @@ -282,13 +304,22 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -308,12 +339,21 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", @@ -345,10 +385,17 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -380,12 +427,13 @@ def prerelease_deps(session): session.install(*constraints_deps) prerel_deps = [ + "google-cloud-audit-log", "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -411,7 +459,13 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -424,6 +478,9 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -432,4 +489,7 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/owlbot.py b/owlbot.py index 3e932c854..2be8464c2 100644 --- a/owlbot.py +++ b/owlbot.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import glob import json import os import shutil @@ -66,12 +67,13 @@ def place_before(path, text, *before_text, escape=None): s.move([library], excludes=[ "**/gapic_version.py", "setup.py", - "testing/constraints-3.7.txt", + "testing/constraints*.txt", "README.rst", "google/cloud/logging/__init__.py", # generated types are hidden from users "google/cloud/logging_v2/__init__.py", "docs/index.rst", "docs/logging_v2", # Don't include gapic library docs. 
Users should use the hand-written layer instead + "docs/multiprocessing.rst", "scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer ], ) @@ -91,7 +93,9 @@ def place_before(path, text, *before_text, escape=None): "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", + "opentelemetry-sdk" ], + system_test_python_versions=["3.12"], unit_test_external_dependencies=["flask", "webob", "django"], samples=True, ) @@ -107,6 +111,13 @@ def place_before(path, text, *before_text, escape=None): "README.rst", # This repo has a customized README ], ) +s.replace("noxfile.py", +"""prerel_deps = \[ + "protobuf",""", +"""prerel_deps = [ + "google-cloud-audit-log", + "protobuf",""", +) # adjust .trampolinerc for environment tests s.replace(".trampolinerc", "required_envvars[^\)]*\)", "required_envvars+=()") @@ -116,13 +127,6 @@ def place_before(path, text, *before_text, escape=None): 'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"', ) -# don't lint environment tests -s.replace( - ".flake8", - "exclude =", - "exclude =\n # Exclude environment test code.\n tests/environment/**\n", -) - # use conventional commits for renovate bot s.replace( "renovate.json", @@ -139,6 +143,110 @@ def place_before(path, text, *before_text, escape=None): python.py_samples() +# For autogenerated sample code, resolve object paths by finding the specific subpackage +# the object belongs to. This is because we leave out all autogenerated packages from the +# __init__.py of logging_v2. For now, this is manually copy-pasted from the __all__s of each +# subpackage's __init__.py. 
+gapic_objects = { + "logging_v2.services.config_service_v2": [ + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient" + ], + "logging_v2.services.logging_service_v2": [ + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient" + ], + "logging_v2.services.metrics_service_v2": [ + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient" + ], + "logging_v2.types": [ + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + 
"CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest" + ] +} + +sample_files = glob.glob("samples/generated_samples/logging_v2_*.py") +for subpackage_name in gapic_objects: + for object_name in gapic_objects[subpackage_name]: + text = "logging_v2." + object_name + replacement = subpackage_name + "." + object_name + s.replace(sample_files, text, replacement) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) s.shell.run(["nox", "-s", "blacken"], cwd="samples/snippets", hide_output=False) @@ -147,7 +255,7 @@ def place_before(path, text, *before_text, escape=None): # -------------------------------------------------------------------------- # add shared environment variables to test configs -tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"] +tracked_subdirs = ["continuous", "presubmit", "samples"] for subdir in tracked_subdirs: for path, subdirs, files in os.walk(f".kokoro/{subdir}"): for name in files: diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..2d8ce14b8 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,29 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning + # Remove once Release PR https://github.com/googleapis/python-api-common-protos/pull/191 is merged + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed + ignore:unclosed:ResourceWarning + # Remove after support for Python 3.7 is dropped + ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning + # Remove warning once 
https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed + ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning + # DeprecationWarnings triggered by Flask 1.0 testing by Flask dependencies in test code + # 3.7 deprecation warnings + ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working:DeprecationWarning + # 3.8 - 3.9 deprecation warnings + ignore:Importing 'itsdangerous.json' is deprecated and will be removed in ItsDangerous 2.1. Use Python's 'json' module instead.:DeprecationWarning + ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3, and in 3.10 it will stop working:DeprecationWarning + # 3.12 deprecation warnings + ignore:Attribute s is deprecated and will be removed in Python 3.14; use value instead:DeprecationWarning + ignore:ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead:DeprecationWarning + ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning + # Remove warning once https://github.com/protocolbuffers/protobuf/issues/17345 is fixed + ignore:.*Please use message_factory.GetMessageClass\(\) instead. 
SymbolDatabase.GetPrototype\(\) will be removed soon.:UserWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/2046 is fixed + ignore:coroutine 'AsyncMockMixin._execute_mock_call' was never awaited:RuntimeWarning diff --git a/renovate.json b/renovate.json index dde963098..ff5e5c4c6 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] }, diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index dead26544..f4a59244d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CopyLogEntriesRequest( + request = logging_v2.types.CopyLogEntriesRequest( name="name_value", destination="destination_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index 949dde286..896bfe77d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CopyLogEntriesRequest( + request = logging_v2.types.CopyLogEntriesRequest( name="name_value", destination="destination_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index f399b226f..25292de97 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ async def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py new file mode 100644 index 000000000..95c692aac --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_bucket_async(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.types.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py new file mode 100644 index 000000000..d1da5bbe7 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_bucket_async(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.types.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 374173f52..395188ef8 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 8f14d777f..10cb193be 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ async def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( + request = logging_v2.types.CreateExclusionRequest( parent="parent_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 11bd92ad5..a52541ea4 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( + request = logging_v2.types.CreateExclusionRequest( parent="parent_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py new file mode 100644 index 000000000..8e4558480 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_link(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.types.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py new file mode 100644 index 000000000..e1f948394 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_link(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.types.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 0fc007986..e83d648a1 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ async def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.CreateSinkRequest( + request = logging_v2.types.CreateSinkRequest( parent="parent_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index d6d2a0d7d..aa694125d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.CreateSinkRequest( + request = logging_v2.types.CreateSinkRequest( parent="parent_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 016f3e70b..f40f1f6d5 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateViewRequest( + request = logging_v2.types.CreateViewRequest( parent="parent_value", view_id="view_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 3ee9567f1..d4f174cf5 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateViewRequest( + request = logging_v2.types.CreateViewRequest( parent="parent_value", view_id="view_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index 6633aa3c4..d56884bf3 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ async def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( + request = logging_v2.types.DeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index ec39ca1a0..0b4b38bf4 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( + request = logging_v2.types.DeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 9d6146c57..1004cf6e8 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ async def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.types.DeleteExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index bc051e4e0..e3c3f0be0 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the 
License. @@ -36,10 +36,10 @@ def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.types.DeleteExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py new file mode 100644 index 000000000..30407d37e --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_delete_link(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.types.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py new file mode 100644 index 000000000..8ddbbcaf3 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_delete_link(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.types.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 62a27ea1d..17ed0b302 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( + request = logging_v2.types.DeleteSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index fa7d6f6e7..bc82c3aa0 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( + request = logging_v2.types.DeleteSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index 2fed68bbc..ca77b619c 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteViewRequest( + request = logging_v2.types.DeleteViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 53bda04c9..f6cd1237a 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteViewRequest( + request = logging_v2.types.DeleteViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 1a91ecdd6..fd2b2a966 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetBucketRequest( + request = logging_v2.types.GetBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 337050c45..728615d94 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetBucketRequest( + request = logging_v2.types.GetBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 6998c4b4a..eb661ea33 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.types.GetCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index a91ec6042..cc0b8e49a 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.types.GetCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 83a5bcda4..c3a10df02 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ async def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.types.GetExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index 913ec9968..0009b048b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.types.GetExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py new file mode 100644 index 000000000..2f1e4647f --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_link(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.types.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py new file mode 100644 index 000000000..8c8d9ddc1 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_link(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.types.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index c095649bc..41abc13ac 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetSettingsRequest( + request = logging_v2.types.GetSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index 2b5350a5a..93bb910a5 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetSettingsRequest( + request = logging_v2.types.GetSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 0739e175b..bdc293b5b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetSinkRequest( + request = logging_v2.types.GetSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index ff34156f3..a4b12474d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetSinkRequest( + request = logging_v2.types.GetSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5de975ecc..4474f6c28 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetViewRequest( + request = logging_v2.types.GetViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index a9818b572..ce5680889 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetViewRequest( + request = logging_v2.types.GetViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 614e9ec66..d4aebfa95 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListBucketsRequest( + request = logging_v2.types.ListBucketsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 0c7912f7f..7fd1e53de 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListBucketsRequest( + request = logging_v2.types.ListBucketsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index a1aa5ed6f..199b152b9 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.types.ListExclusionsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 8cdac9d12..7eafeb00e 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.types.ListExclusionsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py new file mode 100644 index 000000000..a8eee844c --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_links(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.types.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py new file mode 100644 index 000000000..d6fde0bb7 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_links(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.types.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index ea81f5c8b..33e10deb2 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListSinksRequest( + request = logging_v2.types.ListSinksRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 05a35323d..162119bc1 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListSinksRequest( + request = logging_v2.types.ListSinksRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index c39fb2d9e..43e835259 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListViewsRequest( + request = logging_v2.types.ListViewsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 270e5c145..dbddce32c 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListViewsRequest( + request = logging_v2.types.ListViewsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index eeca015d1..7650a542f 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.types.UndeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 6355e2a62..c320d118e 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.types.UndeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 450f5662a..4566ea26a 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py new file mode 100644 index 000000000..f127c9904 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_bucket_async(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.types.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py new file mode 100644 index 000000000..550aed7fb --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_bucket_async(): + # Create a client + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.types.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 81aa0fb92..bfa58357d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index e1a2421fe..22e799e9d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.types.UpdateCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 1828ce84c..766376e0e 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.types.UpdateCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 873fecb73..a19fbea8c 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ async def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.types.UpdateExclusionRequest( name="name_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 958572b93..3ba3a8f24 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.types.UpdateExclusionRequest( name="name_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index 531b431f1..84a3dd245 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateSettingsRequest( + request = logging_v2.types.UpdateSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 3381a4aad..3754cd2e7 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateSettingsRequest( + request = logging_v2.types.UpdateSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index 400d57897..f2c54208b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ async def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.UpdateSinkRequest( + request = logging_v2.types.UpdateSinkRequest( sink_name="sink_name_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index cc3a1be43..6238f9a10 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.UpdateSinkRequest( + request = logging_v2.types.UpdateSinkRequest( sink_name="sink_name_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 8ccc9f3c3..9ba5fc19f 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateViewRequest( + request = logging_v2.types.UpdateViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 33014bf23..97a2ebe4d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateViewRequest( + request = logging_v2.types.UpdateViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 209dd510d..89495ff0b 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLogRequest( + request = logging_v2.types.DeleteLogRequest( log_name="log_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 86f6c9ccb..24e4cd924 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLogRequest( + request = logging_v2.types.DeleteLogRequest( log_name="log_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index d968835f7..e5226e985 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogEntriesRequest( + request = logging_v2.types.ListLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d200793fa..3fb660f90 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogEntriesRequest( + request = logging_v2.types.ListLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index eebad0bfd..4e3153fd6 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,10 +36,10 @@ async def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogsRequest( + request = logging_v2.types.ListLogsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 8d132377e..fa44c8cc7 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogsRequest( + request = logging_v2.types.ListLogsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 4b99bc6f0..054520607 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( + request = logging_v2.types.ListMonitoredResourceDescriptorsRequest( ) # Make the request diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index c54a2201f..cd404c1e1 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( + request = logging_v2.types.ListMonitoredResourceDescriptorsRequest( ) # Make the request diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index f1afa6fae..81eb975b9 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,15 +36,15 @@ async def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.TailLogEntriesRequest( + request = logging_v2.types.TailLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # 'logging_v2.types.TailLogEntriesRequest' objects # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 29461c6f8..5d91ceb7e 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,15 +36,15 @@ def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.TailLogEntriesRequest( + request = logging_v2.types.TailLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # 'logging_v2.types.TailLogEntriesRequest' objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index bd7954828..300dd7819 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,13 +36,13 @@ async def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - entries = logging_v2.LogEntry() + entries = logging_v2.types.LogEntry() entries.log_name = "log_name_value" - request = logging_v2.WriteLogEntriesRequest( + request = logging_v2.types.WriteLogEntriesRequest( entries=entries, ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index d28fbe7a0..7b16177ce 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,13 +36,13 @@ def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - entries = logging_v2.LogEntry() + entries = logging_v2.types.LogEntry() entries.log_name = "log_name_value" - request = logging_v2.WriteLogEntriesRequest( + request = logging_v2.types.WriteLogEntriesRequest( entries=entries, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index d351980e9..8ea8a849b 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ async def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.CreateLogMetricRequest( + request = logging_v2.types.CreateLogMetricRequest( parent="parent_value", metric=metric, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index bb9a56579..f67758eae 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.CreateLogMetricRequest( + request = logging_v2.types.CreateLogMetricRequest( parent="parent_value", metric=metric, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index 54a73b141..68a5e6c8d 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLogMetricRequest( + request = logging_v2.types.DeleteLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index 73276ef18..462f8bc32 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLogMetricRequest( + request = logging_v2.types.DeleteLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index d6ef03c48..9eeea97fc 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetLogMetricRequest( + request = logging_v2.types.GetLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 6ab2bb57f..cf60c346d 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetLogMetricRequest( + request = logging_v2.types.GetLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 92c92395a..f7bc654c8 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ async def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogMetricsRequest( + request = logging_v2.types.ListLogMetricsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 5a3e64692..4ae9a795e 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,10 +36,10 @@ def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogMetricsRequest( + request = logging_v2.types.ListLogMetricsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 9a794a4cc..2c3669bca 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ async def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.UpdateLogMetricRequest( + request = logging_v2.types.UpdateLogMetricRequest( metric_name="metric_name_value", metric=metric, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 39a6e72e3..2fed9c39e 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -36,14 +36,14 @@ def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.UpdateLogMetricRequest( + request = logging_v2.types.UpdateLogMetricRequest( metric_name="metric_name_value", metric=metric, ) diff --git a/samples/generated_samples/snippet_metadata_google.logging.v2.json b/samples/generated_samples/snippet_metadata_google.logging.v2.json index 0cf8959de..7d77545ab 100644 --- a/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.6.0" + "version": "3.12.1" }, "snippets": [ { @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -164,6 +164,159 @@ ], "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucketAsync" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_bucket_async" + }, + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "create_bucket_async" + }, + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" + }, { "canonical": true, "clientMethod": { @@ -196,7 +349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -272,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -357,7 +510,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -441,7 +594,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -494,27 +647,31 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + 
"fullName": "google.logging.v2.ConfigServiceV2.CreateLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "CreateLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" + "type": "google.cloud.logging_v2.types.CreateLinkRequest" }, { "name": "parent", "type": "str" }, { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" }, { "name": "retry", @@ -526,17 +683,105 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_link" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": 
"ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_link" + }, + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_sync", "segments": [ { "end": 56, @@ -554,22 +799,569 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_exclusion" + }, + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" }, { "canonical": true, @@ -578,28 +1370,24 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "DeleteExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -610,25 +1398,24 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "shortName": "delete_exclusion" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -638,22 +1425,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" }, { "canonical": true, @@ -663,19 +1448,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "DeleteLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" 
}, { "name": "retry", @@ -687,25 +1476,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_link" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -715,22 +1504,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_async.py" + "title": "logging_v2_generated_config_service_v2_delete_link_async.py" }, { "canonical": true, @@ -739,19 +1528,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "DeleteLink" }, "parameters": [ { "name": "request", - 
"type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -763,25 +1556,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_link" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -791,22 +1584,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_link_sync.py" }, { "canonical": true, @@ -816,19 +1609,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "DeleteSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -840,16 +1637,16 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_bucket" + "shortName": "delete_sink" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { "end": 49, @@ -880,7 +1677,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" }, { "canonical": true, @@ -889,19 +1686,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "DeleteSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": 
"sink_name", + "type": "str" }, { "name": "retry", @@ -913,16 +1714,16 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_bucket" + "shortName": "delete_sink" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { "end": 49, @@ -953,7 +1754,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" }, { "canonical": true, @@ -963,23 +1764,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "DeleteView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.DeleteViewRequest" }, { "name": "retry", @@ -991,16 +1788,16 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_exclusion" + "shortName": "delete_view" }, - "description": "Sample for 
DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { "end": 49, @@ -1031,7 +1828,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_delete_view_async.py" }, { "canonical": true, @@ -1040,23 +1837,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "DeleteView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.DeleteViewRequest" }, { "name": "retry", @@ -1068,16 +1861,16 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_exclusion" + "shortName": "delete_view" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { "end": 49, @@ -1108,7 +1901,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" }, { "canonical": true, @@ -1118,23 +1911,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -1146,24 +1935,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, 
"type": "SHORT" }, @@ -1178,15 +1968,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" }, { "canonical": true, @@ -1195,23 +1987,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -1223,24 +2011,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1255,15 
+2044,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { "canonical": true, @@ -1273,19 +2064,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -1297,24 +2088,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1329,15 
+2121,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_async.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { "canonical": true, @@ -1346,19 +2140,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -1370,24 +2164,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1402,15 +2197,17 @@ "type": 
"REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { "canonical": true, @@ -1420,19 +2217,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1444,17 +2245,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { "end": 51, @@ -1487,7 +2288,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_get_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { "canonical": true, @@ -1496,19 +2297,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1520,17 +2325,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { "end": 51, @@ -1563,7 +2368,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { "canonical": true, @@ -1573,19 +2378,23 @@ "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1597,17 +2406,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async", "segments": [ { "end": 51, @@ -1640,7 +2449,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" + "title": "logging_v2_generated_config_service_v2_get_link_async.py" }, { "canonical": true, @@ -1649,19 +2458,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", + 
"fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1673,17 +2486,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_sync", "segments": [ { "end": 51, @@ -1716,7 +2529,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" + "title": "logging_v2_generated_config_service_v2_get_link_sync.py" }, { "canonical": true, @@ -1726,19 +2539,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": 
"google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" }, { "name": "name", @@ -1754,17 +2567,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { "end": 51, @@ -1797,7 +2610,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_get_settings_async.py" }, { "canonical": true, @@ -1806,19 +2619,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetSettings" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" }, { "name": "name", @@ -1834,17 +2647,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { "end": 51, @@ -1877,7 +2690,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" }, { "canonical": true, @@ -1887,22 +2700,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" }, { - "name": "name", + "name": "sink_name", "type": "str" }, { @@ -1915,17 +2728,17 @@ }, 
{ "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.Settings", - "shortName": "get_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, - "description": "Sample for GetSettings", - "file": "logging_v2_generated_config_service_v2_get_settings_async.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { "end": 51, @@ -1958,7 +2771,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_settings_async.py" + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { "canonical": true, @@ -1967,22 +2780,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" }, { - "name": "name", + "name": "sink_name", "type": "str" }, { @@ -1995,17 +2808,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.Settings", - "shortName": "get_settings" + "resultType": 
"google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, - "description": "Sample for GetSettings", - "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { "end": 51, @@ -2038,7 +2851,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { "canonical": true, @@ -2048,23 +2861,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -2076,17 +2885,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "description": "Sample for GetView", + "file": 
"logging_v2_generated_config_service_v2_get_view_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { "end": 51, @@ -2119,7 +2928,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_async.py" + "title": "logging_v2_generated_config_service_v2_get_view_async.py" }, { "canonical": true, @@ -2128,23 +2937,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" - }, - { - "name": "sink_name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -2156,17 +2961,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { "end": 
51, @@ -2199,7 +3004,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { "canonical": true, @@ -2209,19 +3014,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -2233,25 +3042,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2271,12 +3080,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 
52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_async.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { "canonical": true, @@ -2285,19 +3094,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -2309,25 +3122,25 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2347,12 +3160,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, 
"start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_sync.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { "canonical": true, @@ -2362,19 +3175,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" }, { "name": "parent", @@ -2390,17 +3203,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { "end": 52, @@ -2433,7 +3246,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_list_buckets_async.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { "canonical": true, @@ -2442,19 +3255,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" }, { "name": "parent", @@ -2470,17 +3283,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { "end": 52, @@ -2513,7 +3326,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, 
{ "canonical": true, @@ -2523,19 +3336,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2551,17 +3364,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", + "shortName": "list_links" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async", "segments": [ { "end": 52, @@ -2594,7 +3407,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" + "title": "logging_v2_generated_config_service_v2_list_links_async.py" }, { "canonical": true, @@ -2603,19 +3416,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": 
"ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2631,17 +3444,17 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", + "shortName": "list_links" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_sync", "segments": [ { "end": 52, @@ -2674,7 +3487,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" + "title": "logging_v2_generated_config_service_v2_list_links_sync.py" }, { "canonical": true, @@ -2712,7 +3525,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", @@ -2792,7 +3605,7 @@ }, 
{ "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", @@ -2873,7 +3686,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", @@ -2953,7 +3766,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", @@ -3030,7 +3843,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3103,7 +3916,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3145,6 +3958,159 @@ ], "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "update_bucket_async" + }, + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_bucket_async" + }, + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", + 
"segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" + }, { "canonical": true, "clientMethod": { @@ -3177,7 +4143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -3253,7 +4219,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -3330,7 +4296,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -3406,7 +4372,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -3495,7 +4461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -3583,7 +4549,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -3668,7 +4634,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -3752,7 +4718,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -3841,7 +4807,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -3929,7 +4895,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4006,7 +4972,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -4082,7 +5048,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -4163,7 +5129,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -4240,7 +5206,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -4326,7 +5292,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", @@ -4414,7 +5380,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", @@ -4495,7 +5461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", @@ -4575,7 +5541,7 @@ }, { 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", @@ -4652,7 +5618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", @@ -4728,7 +5694,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", @@ -4805,7 +5771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -4881,7 +5847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -4974,7 +5940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -5066,7 +6032,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -5151,7 +6117,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -5235,7 +6201,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -5316,7 +6282,7 
@@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -5393,7 +6359,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -5471,7 +6437,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -5551,7 +6517,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -5632,7 +6598,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", @@ -5712,7 +6678,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", @@ -5797,7 +6763,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -5881,7 +6847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", diff --git a/samples/snippets/export_test.py b/samples/snippets/export_test.py index b1ecf4923..845359e09 100644 --- a/samples/snippets/export_test.py +++ b/samples/snippets/export_test.py @@ -13,19 +13,30 @@ # limitations under the License. 
import os +import re import random import string +import time import backoff -from google.cloud import logging +from google.cloud import logging, storage import pytest import export BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] -TEST_SINK_NAME_TMPL = "example_sink_{}" +TEST_SINK_NAME_TMPL = "example_sink_{}_{}" TEST_SINK_FILTER = "severity>=CRITICAL" +TIMESTAMP = int(time.time()) + +# Threshold beyond which the cleanup_old_sinks fixture will delete +# old sink, in seconds +CLEANUP_THRESHOLD = 7200 # 2 hours + +# Max buckets to delete at a time, to mitigate operation timeout +# issues. To turn off in the future, set to None. +MAX_BUCKETS = 1500 def _random_id(): @@ -34,12 +45,49 @@ def _random_id(): ) -@pytest.yield_fixture -def example_sink(): +def _create_sink_name(): + return TEST_SINK_NAME_TMPL.format(TIMESTAMP, _random_id()) + + +@backoff.on_exception(backoff.expo, Exception, max_time=60, raise_on_giveup=False) +def _delete_object(obj): + obj.delete() + + +# Runs once for entire test suite +@pytest.fixture(scope="module") +def cleanup_old_sinks(): + client = logging.Client() + test_sink_name_regex = ( + r"^" + TEST_SINK_NAME_TMPL.format(r"(\d+)", r"[A-Z0-9]{6}") + r"$" + ) + for sink in client.list_sinks(): + match = re.match(test_sink_name_regex, sink.name) + if match: + sink_timestamp = int(match.group(1)) + if TIMESTAMP - sink_timestamp > CLEANUP_THRESHOLD: + _delete_object(sink) + + storage_client = storage.Client() + + # See _sink_storage_setup in usage_guide.py for details about how + # sinks are named. 
+ test_bucket_name_regex = r"^sink\-storage\-(\d+)$" + for bucket in storage_client.list_buckets(max_results=MAX_BUCKETS): + match = re.match(test_bucket_name_regex, bucket.name) + if match: + # Bucket timestamp is int(time.time() * 1000) + bucket_timestamp = int(match.group(1)) + if TIMESTAMP - bucket_timestamp // 1000 > CLEANUP_THRESHOLD: + _delete_object(bucket) + + +@pytest.fixture +def example_sink(cleanup_old_sinks): client = logging.Client() sink = client.sink( - TEST_SINK_NAME_TMPL.format(_random_id()), + _create_sink_name(), filter_=TEST_SINK_FILTER, destination="storage.googleapis.com/{bucket}".format(bucket=BUCKET), ) @@ -48,10 +96,7 @@ def example_sink(): yield sink - try: - sink.delete() - except Exception: - pass + _delete_object(sink) def test_list(example_sink, capsys): @@ -65,16 +110,13 @@ def eventually_consistent_test(): def test_create(capsys): - sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) + sink_name = _create_sink_name() try: export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink. finally: - try: - logging.Client().sink(sink_name).delete() - except Exception: - pass + _delete_object(logging.Client().sink(sink_name)) out, _ = capsys.readouterr() assert sink_name in out diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 1224cbe21..c9a3d1ecb 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index cbd0a47de..37eb1f9aa 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,3 @@ backoff==2.2.1 -pytest==7.4.0 +pytest===7.4.4; python_version == '3.7' +pytest==8.2.2; python_version >= '3.8' diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index d4cc2c363..8a52ee5c6 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.6.0 -google-cloud-bigquery==3.11.4 -google-cloud-storage==2.10.0 -google-cloud-pubsub==2.18.2 +google-cloud-logging==3.10.0 +google-cloud-bigquery==3.25.0 +google-cloud-storage==2.17.0 +google-cloud-pubsub==2.22.0 diff --git a/samples/snippets/snippets.py b/samples/snippets/snippets.py index 39399dcf7..736311e0f 100644 --- a/samples/snippets/snippets.py +++ b/samples/snippets/snippets.py @@ -38,7 +38,7 @@ def write_entry(logger_name): logger.log_text("Hello, world!") # Simple text log with severity. - logger.log_text("Goodbye, world!", severity="ERROR") + logger.log_text("Goodbye, world!", severity="WARNING") # Struct log. The struct can be any JSON-serializable dictionary. 
logger.log_struct( @@ -46,7 +46,8 @@ def write_entry(logger_name): "name": "King Arthur", "quest": "Find the Holy Grail", "favorite_color": "Blue", - } + }, + severity="INFO", ) print("Wrote logs to {}.".format(logger.name)) diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index 5c9e86990..6dee33798 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -475,13 +475,47 @@ def using_extras(client): def setup_logging(client): import logging - # [START setup_logging] + # [START logging_setup_logging] client.setup_logging(log_level=logging.INFO) - # [END setup_logging] + # [END logging_setup_logging] - # [START setup_logging_excludes] + # [START logging_setup_logging_excludes] client.setup_logging(log_level=logging.INFO, excluded_loggers=("werkzeug",)) - # [END setup_logging_excludes] + # [END logging_setup_logging_excludes] + + +@snippet +def logging_dict_config(client): + # [START logging_dict_config] + import logging.config + + import google.cloud.logging + + client = google.cloud.logging.Client() + + LOGGING = { + "version": 1, + "handlers": { + "cloud_logging_handler": { + "class": "google.cloud.logging.handlers.CloudLoggingHandler", + "client": client, + }, + "structured_log_handler": { + "class": "google.cloud.logging.handlers.StructuredLogHandler" + }, + }, + "root": {"handlers": [], "level": "WARNING"}, + "loggers": { + "cloud_logger": {"handlers": ["cloud_logging_handler"], "level": "INFO"}, + "structured_logger": { + "handlers": ["structured_log_handler"], + "level": "INFO", + }, + }, + } + + logging.config.dictConfig(LOGGING) + # [END logging_dict_config] def _line_no(func): diff --git a/samples/snippets/usage_guide_test.py b/samples/snippets/usage_guide_test.py index f02d82fbd..3f606dd65 100644 --- a/samples/snippets/usage_guide_test.py +++ b/samples/snippets/usage_guide_test.py @@ -88,3 +88,9 @@ def test_client_list_entries(): for item in to_delete: usage_guide._backoff_not_found(item.delete) + 
+ +def test_dict_config(): + client = Client() + + usage_guide.logging_dict_config(client) diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 0018b421d..120b0ddc4 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 1acc11983..8f5e248a0 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 052350089..000000000 --- a/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! 
-[bdist_wheel] -universal = 1 diff --git a/setup.py b/setup.py index f43fd0bf9..c80db0467 100644 --- a/setup.py +++ b/setup.py @@ -36,14 +36,19 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", - "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", - "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", - "google-cloud-core >= 2.0.0, <3.0.0dev", - "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-cloud-appengine-logging>=0.1.3, <2.0.0", + "google-cloud-audit-log >= 0.3.1, < 1.0.0", + "google-cloud-core >= 2.0.0, <3.0.0", + "grpc-google-iam-v1 >=0.12.4, <1.0.0", + "opentelemetry-api >= 1.9.0", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" @@ -55,14 +60,10 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -83,13 +84,13 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python 
:: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index ed7f9aed2..981d37ac6 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -2,5 +2,15 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index ed7f9aed2..981d37ac6 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -2,5 +2,15 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt index ed7f9aed2..981d37ac6 100644 --- a/testing/constraints-3.12.txt +++ b/testing/constraints-3.12.txt @@ -2,5 +2,15 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt new file mode 100644 index 000000000..981d37ac6 --- /dev/null +++ b/testing/constraints-3.13.txt @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +proto-plus +protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api + +# optional dependencies +django +flask diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 587626c54..113004889 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,7 +4,22 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.1 +google-auth==2.14.1 proto-plus==1.22.0 -protobuf==3.19.5 +protobuf==3.20.2 google-cloud-core==2.0.0 +google-cloud-appengine-logging==0.1.3 +google-cloud-audit-log==0.3.1 +grpc-google-iam-v1==0.12.4 +opentelemetry-api==1.9.0 + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 \ No newline at end of file diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index ed7f9aed2..76b620077 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1,6 +1,22 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf +google-api-core==2.14.0 +google-auth==2.14.1 +proto-plus==1.22.0 +protobuf==4.21.6 +google-cloud-core==2.0.0 +google-cloud-appengine-logging==0.1.3 +google-cloud-audit-log==0.3.1 +grpc-google-iam-v1==0.12.4 +opentelemetry-api==1.9.0 + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index ed7f9aed2..10c5cba87 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -2,5 +2,21 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth proto-plus protobuf +google-cloud-core +google-cloud-appengine-logging +google-cloud-audit-log +grpc-google-iam-v1 +opentelemetry-api==1.9.0 + +# Lower bound testing for optional dependencies +django==3.2 + +# Need specific versions of Flask dependencies for Flask 1.0 to work +flask==1.0.0 +jinja2==2.10.1 +markupsafe==2.0.1 +itsdangerous==2.0.1 +werkzeug==1.0.1 diff --git a/tests/__init__.py b/tests/__init__.py index e8e1c3845..cbf94b283 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 8d39408d3..487ecde62 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -19,6 +19,7 @@ import numbers import os import pytest +import sys import unittest import uuid @@ -33,6 +34,7 @@ import google.cloud.logging from google.cloud._helpers import UTC from google.cloud.logging_v2.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource @@ -117,8 +119,26 @@ def setUpModule(): ) -class TestLogging(unittest.TestCase): +def _cleanup_otel_sdk_modules(f): + """ + Decorator to delete all references to opentelemetry SDK modules after a + testcase is run. Test case should import opentelemetry SDK modules inside + the function. This is to test situations where the opentelemetry SDK + is not imported at all. 
+ """ + + def wrapped(*args, **kwargs): + f(*args, **kwargs) + + # Deleting from sys.modules should be good enough in this use case + for module_name in list(sys.modules.keys()): + if module_name.startswith("opentelemetry.sdk"): + sys.modules.pop(module_name) + + return wrapped + +class TestLogging(unittest.TestCase): JSON_PAYLOAD = { "message": "System test: test_log_struct", "weather": { @@ -193,6 +213,7 @@ def test_list_entry_with_auditlog(self): "methodName": "test", "resourceName": "test", "serviceName": "test", + "requestMetadata": {"callerIp": "127.0.0.1"}, } audit_struct = self._dict_to_struct(audit_dict) @@ -224,6 +245,12 @@ def test_list_entry_with_auditlog(self): protobuf_entry.to_api_repr()["protoPayload"]["methodName"], audit_dict["methodName"], ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["requestMetadata"][ + "callerIp" + ], + audit_dict["requestMetadata"]["callerIp"], + ) def test_list_entry_with_requestlog(self): """ @@ -330,7 +357,7 @@ def test_log_text_with_timestamp(self): text_payload = "System test: test_log_text_with_timestamp" gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) - now = datetime.utcnow() + now = datetime.now(timezone.utc) loggers = ( [gapic_logger] if Config.use_mtls == "always" @@ -350,7 +377,7 @@ def test_log_text_with_resource(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) - now = datetime.utcnow() + now = datetime.now(timezone.utc) loggers = ( [gapic_logger] if Config.use_mtls == "always" @@ -596,10 +623,10 @@ def test_handlers_w_extras(self): "trace_sampled": True, "http_request": expected_request, "source_location": expected_source, - "resource": Resource(type="cloudiot_device", labels={}), + "resource": Resource(type="global", labels={}), "labels": {"test-label": "manual"}, } - 
cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -628,7 +655,7 @@ def test_handlers_w_json_fields(self): cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) extra = {"json_fields": {"hello": "world", "two": 2}} - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -656,6 +683,109 @@ def test_log_root_handler(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) + @_cleanup_otel_sdk_modules + def test_log_handler_otel_integration(self): + # Doing OTel imports here to not taint the other tests with OTel SDK imports + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider + + LOG_MESSAGE = "This is a test of OpenTelemetry" + LOGGER_NAME = "otel-integration" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + # Set up OTel SDK + provider = TracerProvider() + + tracer = provider.get_tracer("test_system") + with tracer.start_as_current_span("test-span") as span: + context = span.get_span_context() + expected_trace_id = f"projects/{Config.CLIENT.project}/traces/{trace.format_trace_id(context.trace_id)}" + expected_span_id = trace.format_span_id(context.span_id) + expected_tracesampled = context.trace_flags.sampled + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warning(LOG_MESSAGE) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, expected_trace_id) + self.assertEqual(entries[0].span_id, expected_span_id) + 
self.assertTrue(entries[0].trace_sampled, expected_tracesampled) + + def test_log_handler_close(self): + from multiprocessing import Process + + LOG_MESSAGE = "This is a test of handler.close before exiting." + LOGGER_NAME = "close-test" + handler_name = self._logger_name(LOGGER_NAME) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler_name) + self.to_delete.append(logger) + + # Run a simulation of logging an entry then immediately shutting down. + # The .close() function before the process exits should prevent the + # thread shutdown error and let us log the message. + def subprocess_main(): + # logger.delete and logger.list_entries work by filtering on log name, so we + # can create new objects with the same name and have the queries on the parent + # process still work. + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=BackgroundThreadTransport + ) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warning(LOG_MESSAGE) + handler.close() + + proc = Process(target=subprocess_main) + proc.start() + proc.join() + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, LOG_MESSAGE) + + def test_log_client_flush_handlers(self): + from multiprocessing import Process + + LOG_MESSAGE = "This is a test of client.flush_handlers before exiting." + LOGGER_NAME = "close-test" + handler_name = self._logger_name(LOGGER_NAME) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler_name) + self.to_delete.append(logger) + + # Run a simulation of logging an entry then immediately shutting down. + # The .close() function before the process exits should prevent the + # thread shutdown error and let us log the message. 
+ def subprocess_main(): + # logger.delete and logger.list_entries work by filtering on log name, so we + # can create new objects with the same name and have the queries on the parent + # process still work. + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=BackgroundThreadTransport + ) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + cloud_logger.warning(LOG_MESSAGE) + Config.CLIENT.flush_handlers() + + proc = Process(target=subprocess_main) + proc.start() + proc.join() + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, LOG_MESSAGE) + def test_create_metric(self): METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,) metric = Config.CLIENT.metric( @@ -807,7 +937,7 @@ def _init_bigquery_dataset(self): # Stackdriver Logging to write into it. retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable)) bigquery_client = bigquery.Client() - dataset_ref = bigquery_client.dataset(dataset_name) + dataset_ref = bigquery.DatasetReference(Config.CLIENT.project, dataset_name) dataset = retry(bigquery_client.create_dataset)(bigquery.Dataset(dataset_ref)) self.to_delete.append((bigquery_client, dataset)) bigquery_client.get_dataset(dataset) diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index e8e1c3845..cbf94b283 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index e8e1c3845..cbf94b283 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/logging_v2/__init__.py b/tests/unit/gapic/logging_v2/__init__.py index e8e1c3845..cbf94b283 100644 --- a/tests/unit/gapic/logging_v2/__init__.py +++ b/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index be77714c0..73a8f5d32 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -24,11 +24,20 @@ import grpc from grpc.experimental import aio +import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -39,6 +48,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( @@ -48,17 +58,40 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon 
credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -70,6 +103,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -99,6 +143,237 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ConfigServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ConfigServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must 
be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ConfigServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ConfigServiceV2Client._get_client_cert_source(None, False) is None + assert ( + ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ConfigServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ConfigServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + 
+@mock.patch.object( + ConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ConfigServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ConfigServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ConfigServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ConfigServiceV2Client._get_universe_domain(None, None) + == ConfigServiceV2Client._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ConfigServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConfigServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConfigServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + 
@pytest.mark.parametrize( "client_class,transport_name", [ @@ -201,13 +476,13 @@ def test_config_service_v2_client_get_transport_class(): ) @mock.patch.object( ConfigServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ConfigServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), ) @mock.patch.object( ConfigServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ConfigServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), ) def test_config_service_v2_client_client_options( client_class, transport_class, transport_name @@ -249,7 +524,9 @@ def test_config_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -279,15 +556,23 @@ def test_config_service_v2_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -297,7 +582,9 @@ def test_config_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -315,7 +602,9 @@ def test_config_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -356,13 +645,13 @@ def test_config_service_v2_client_client_options( ) @mock.patch.object( ConfigServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ConfigServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), ) @mock.patch.object( ConfigServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ConfigServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_config_service_v2_client_mtls_env_auto( @@ -385,7 +674,9 @@ def test_config_service_v2_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = 
client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -417,7 +708,9 @@ def test_config_service_v2_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -451,7 +744,9 @@ def test_config_service_v2_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -541,6 +836,115 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] +) +@mock.patch.object( + ConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), +) +def test_config_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -566,7 +970,9 @@ def test_config_service_v2_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -605,7 +1011,9 @@ def test_config_service_v2_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -665,7 +1073,9 @@ def test_config_service_v2_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -736,27 +1146,119 @@ def test_list_buckets(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request = logging_config.ListBucketsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBucketsPager) assert response.next_page_token == "next_page_token_value" -def test_list_buckets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_buckets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListBucketsRequest( + parent="parent_value", + page_token="page_token_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: - client.list_buckets() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_buckets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + assert args[0] == logging_config.ListBucketsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_buckets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_buckets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc + request = {} + client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_buckets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_buckets + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_buckets + ] = mock_rpc + + request = {} + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -764,7 +1266,7 @@ async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -785,7 +1287,8 @@ async def test_list_buckets_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request = logging_config.ListBucketsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) @@ -829,7 +1332,7 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -899,7 +1402,7 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -928,7 +1431,7 @@ async def test_list_buckets_flattened_async(): @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -942,7 +1445,7 @@ async def test_list_buckets_flattened_error_async(): def test_list_buckets_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -977,13 +1480,17 @@ def test_list_buckets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_buckets(request={}) + pager = 
client.list_buckets(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -992,7 +1499,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1034,7 +1541,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_buckets_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1084,7 +1591,7 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1120,9 +1627,11 @@ async def test_list_buckets_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_buckets(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1154,6 +1663,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1161,7 +1671,8 @@ def test_get_bucket(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request = logging_config.GetBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1170,48 +1681,137 @@ def test_get_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True assert response.restricted_fields == ["restricted_fields_value"] -def test_get_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
+def test_get_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetBucketRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: - client.get_bucket() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + assert args[0] == logging_config.GetBucketRequest( + name="name_value", + ) -@pytest.mark.asyncio -async def test_get_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest -): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_get_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, + # Ensure method has been cached + assert client._transport.get_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc + request = {} + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_bucket + ] = mock_rpc + + request = {} + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, restricted_fields=["restricted_fields_value"], ) ) @@ -1220,7 +1820,8 @@ async def test_get_bucket_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request = logging_config.GetBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1229,6 +1830,7 @@ async def test_get_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True assert response.restricted_fields == ["restricted_fields_value"] @@ -1269,7 +1871,7 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1305,7 +1907,7 @@ async def test_get_bucket_field_headers_async(): dict, ], ) -def test_create_bucket(request_type, transport: str = "grpc"): +def test_create_bucket_async(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1316,55 +1918,152 @@ def test_create_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the 
request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) - response = client.create_bucket(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) -def test_create_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: - client.create_bucket() + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + assert args[0] == logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + +def test_create_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_bucket_async in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_bucket_async + ] = mock_rpc + request = {} + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_bucket_async( +async def test_create_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_bucket_async + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket_async + ] = mock_rpc + + request = {} + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1373,41 +2072,31 @@ async def test_create_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.create_bucket(request) + response = await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_create_bucket_async_from_dict(): - await test_create_bucket_async(request_type=dict) +async def test_create_bucket_async_async_from_dict(): + await test_create_bucket_async_async(request_type=dict) -def test_create_bucket_field_headers(): +def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1419,9 +2108,11 @@ def test_create_bucket_field_headers(): request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() - client.create_bucket(request) + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -1437,9 +2128,9 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio -async def test_create_bucket_field_headers_async(): +async def test_create_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1449,11 +2140,13 @@ async def test_create_bucket_field_headers_async(): request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + operations_pb2.Operation(name="operations/op") ) - await client.create_bucket(request) + await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1475,7 +2168,7 @@ async def test_create_bucket_field_headers_async(): dict, ], ) -def test_update_bucket(request_type, transport: str = "grpc"): +def test_update_bucket_async(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1486,55 +2179,150 @@ def test_update_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) - response = client.update_bucket(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) -def test_update_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_update_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: - client.update_bucket() + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + assert args[0] == logging_config.UpdateBucketRequest( + name="name_value", + ) + + +def test_update_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_bucket_async in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_bucket_async + ] = mock_rpc + request = {} + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_bucket_async( +async def test_update_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_bucket_async + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket_async + ] = mock_rpc + + request = {} + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1543,41 +2331,31 @@ async def test_update_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket( - name="name_value", - description="description_value", - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - restricted_fields=["restricted_fields_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_bucket(request) + response = await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - assert response.restricted_fields == ["restricted_fields_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_bucket_async_from_dict(): - await test_update_bucket_async(request_type=dict) +async def test_update_bucket_async_async_from_dict(): + await test_update_bucket_async_async(request_type=dict) -def test_update_bucket_field_headers(): +def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1589,9 +2367,11 @@ def test_update_bucket_field_headers(): request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: - call.return_value = logging_config.LogBucket() - client.update_bucket(request) + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -1607,9 +2387,9 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio -async def test_update_bucket_field_headers_async(): +async def test_update_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1619,11 +2399,13 @@ async def test_update_bucket_field_headers_async(): request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogBucket() + operations_pb2.Operation(name="operations/op") ) - await client.update_bucket(request) + await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1641,11 +2423,11 @@ async def test_update_bucket_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.DeleteBucketRequest, + logging_config.CreateBucketRequest, dict, ], ) -def test_delete_bucket(request_type, transport: str = "grpc"): +def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1656,42 +2438,149 @@ def test_delete_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_bucket(request) + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + response = client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] -def test_delete_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: - client.delete_bucket() + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + assert args[0] == logging_config.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + +def test_create_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc + request = {} + client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest +async def test_create_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket + ] = mock_rpc + + request = {} + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1700,40 +2589,58 @@ async def test_delete_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_bucket(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio -async def test_delete_bucket_async_from_dict(): - await test_delete_bucket_async(request_type=dict) +async def test_create_bucket_async_from_dict(): + await test_create_bucket_async(request_type=dict) -def test_delete_bucket_field_headers(): +def test_create_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteBucketRequest() + request = logging_config.CreateBucketRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: - call.return_value = None - client.delete_bucket(request) + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.create_bucket(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -1744,26 +2651,28 @@ def test_delete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_bucket_field_headers_async(): +async def test_create_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteBucketRequest() + request = logging_config.CreateBucketRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket(request) + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -1774,18 +2683,18 @@ async def test_delete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.parametrize( "request_type", [ - logging_config.UndeleteBucketRequest, + logging_config.UpdateBucketRequest, dict, ], ) -def test_undelete_bucket(request_type, transport: str = "grpc"): +def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1796,42 +2705,147 @@ def test_undelete_bucket(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.undelete_bucket(request) + call.return_value = logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + response = client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] -def test_undelete_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_update_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: - client.undelete_bucket() + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + assert args[0] == logging_config.UpdateBucketRequest( + name="name_value", + ) + + +def test_update_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc + request = {} + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_undelete_bucket_async( - transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +async def test_update_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket + ] = mock_rpc + + request = {} + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1840,40 +2854,58 @@ async def test_undelete_bucket_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.undelete_bucket(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, logging_config.LogBucket) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio -async def test_undelete_bucket_async_from_dict(): - await test_undelete_bucket_async(request_type=dict) +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) -def test_undelete_bucket_field_headers(): +def test_update_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UndeleteBucketRequest() + request = logging_config.UpdateBucketRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: - call.return_value = None - client.undelete_bucket(request) + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -1889,21 +2921,23 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio -async def test_undelete_bucket_field_headers_async(): +async def test_update_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.UndeleteBucketRequest() + request = logging_config.UpdateBucketRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket(request) + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) + await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1921,11 +2955,11 @@ async def test_undelete_bucket_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.ListViewsRequest, + logging_config.DeleteBucketRequest, dict, ], ) -def test_list_views(request_type, transport: str = "grpc"): +def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1936,45 +2970,132 @@ def test_list_views(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListViewsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_views(request) + call.return_value = None + response = client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request = logging_config.DeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_views_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_delete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteBucketRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - client.list_views() + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + assert args[0] == logging_config.DeleteBucketRequest( + name="name_value", + ) + + +def test_delete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc + request = {} + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_views_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest +async def test_delete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_bucket + ] = mock_rpc + + request = {} + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1983,45 +3104,41 @@ async def test_list_views_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_views(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request = logging_config.DeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.asyncio -async def test_list_views_async_from_dict(): - await test_list_views_async(request_type=dict) +async def test_delete_bucket_async_from_dict(): + await test_delete_bucket_async(request_type=dict) -def test_list_views_field_headers(): +def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListViewsRequest() + request = logging_config.DeleteBucketRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = logging_config.ListViewsResponse() - client.list_views(request) + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = None + client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2032,28 +3149,26 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_views_field_headers_async(): +async def test_delete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.ListViewsRequest() + request = logging_config.DeleteBucketRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse() - ) - await client.list_views(request) + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2064,392 +3179,197 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_views_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UndeleteBucketRequest, + dict, + ], +) +def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListViewsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_views( - parent="parent_value", - ) + call.return_value = None + response = client.undelete_bucket(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + request = logging_config.UndeleteBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None -def test_list_views_flattened_error(): +def test_undelete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_views( - logging_config.ListViewsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_views_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UndeleteBucketRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListViewsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse() + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_views( - parent="parent_value", + client.undelete_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest( + name="name_value", ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val +def test_undelete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.asyncio -async def test_list_views_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Ensure method has been cached + assert client._transport.undelete_bucket in client._transport._wrapped_methods - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_views( - logging_config.ListViewsRequest(), - parent="parent_value", + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc + request = {} + client.undelete_bucket(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_list_views_pager(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) + client.undelete_bucket(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_undelete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + 
transport=transport, ) - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.undelete_bucket + in client._client._transport._wrapped_methods ) - pager = client.list_views(request={}) - assert pager._metadata == metadata + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.undelete_bucket + ] = mock_rpc - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogView) for i in results) + request = {} + await client.undelete_bucket(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_list_views_pages(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + await client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_undelete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_views), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - pages = list(client.list_views(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_views_async_pager(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_views( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogView) for i in responses) - - -@pytest.mark.asyncio -async def test_list_views_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token="abc", - ), - logging_config.ListViewsResponse( - views=[], - next_page_token="def", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token="ghi", - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_views(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - logging_config.GetViewRequest, - dict, - ], -) -def test_get_view(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", - ) - response = client.get_view(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - - -def test_get_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - client.get_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() - - -@pytest.mark.asyncio -async def test_get_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest -): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", - ) - ) - response = await client.get_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request = logging_config.UndeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + assert response is None @pytest.mark.asyncio -async def test_get_view_async_from_dict(): - await test_get_view_async(request_type=dict) +async def test_undelete_bucket_async_from_dict(): + await test_undelete_bucket_async(request_type=dict) -def test_get_view_field_headers(): +def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetViewRequest() + request = logging_config.UndeleteBucketRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = logging_config.LogView() - client.get_view(request) + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = None + client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2465,23 +3385,21 @@ def test_get_view_field_headers(): @pytest.mark.asyncio -async def test_get_view_field_headers_async(): +async def test_undelete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.GetViewRequest() + request = logging_config.UndeleteBucketRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView() - ) - await client.get_view(request) + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2499,11 +3417,11 @@ async def test_get_view_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.CreateViewRequest, + logging_config.ListViewsRequest, dict, ], ) -def test_create_view(request_type, transport: str = "grpc"): +def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2514,49 +3432,135 @@ def test_create_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + call.return_value = logging_config.ListViewsResponse( + next_page_token="next_page_token_value", ) - response = client.create_view(request) + response = client.list_views(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request = logging_config.ListViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: - client.create_view() + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_views(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + assert args[0] == logging_config.ListViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_rpc + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_views_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2565,49 +3569,46 @@ async def test_create_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_view(request) + response = await client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request = logging_config.ListViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + assert isinstance(response, pagers.ListViewsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_view_async_from_dict(): - await test_create_view_async(request_type=dict) +async def test_list_views_async_from_dict(): + await test_list_views_async(request_type=dict) -def test_create_view_field_headers(): +def test_list_views_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateViewRequest() + request = logging_config.ListViewsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_view), "__call__") as call: - call.return_value = logging_config.LogView() - client.create_view(request) + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2623,23 +3624,23 @@ def test_create_view_field_headers(): @pytest.mark.asyncio -async def test_create_view_field_headers_async(): +async def test_list_views_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateViewRequest() + request = logging_config.ListViewsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView() + logging_config.ListViewsResponse() ) - await client.create_view(request) + await client.list_views(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2654,172 +3655,290 @@ async def test_create_view_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateViewRequest, - dict, - ], -) -def test_update_view(request_type, transport: str = "grpc"): +def test_list_views_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", + call.return_value = logging_config.ListViewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_views( + parent="parent_value", ) - response = client.update_view(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_update_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. +def test_list_views_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_views( + logging_config.ListViewsRequest(), + parent="parent_value", + ) @pytest.mark.asyncio -async def test_update_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest -): +async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=async_anonymous_credentials(), ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView( - name="name_value", - description="description_value", - filter="filter_value", - ) + logging_config.ListViewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_views( + parent="parent_value", ) - response = await client.update_view(request) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying call was made with the expected + # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_update_view_async_from_dict(): - await test_update_view_async(request_type=dict) +async def test_list_views_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_views( + logging_config.ListViewsRequest(), + parent="parent_value", + ) -def test_update_view_field_headers(): +def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateViewRequest() - - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = logging_config.LogView() - client.update_view(request) + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_views(request={}, retry=retry, timeout=timeout) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogView) for i in results) -@pytest.mark.asyncio -async def test_update_view_field_headers_async(): - client = ConfigServiceV2AsyncClient( +def test_list_views_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateViewRequest() - - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogView() + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, ) - await client.update_view(request) + pages = list(client.list_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_views_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogView) for i in responses) + + +@pytest.mark.asyncio +async def test_list_views_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token="abc", + ), + logging_config.ListViewsResponse( + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - logging_config.DeleteViewRequest, + logging_config.GetViewRequest, dict, ], ) -def test_delete_view(request_type, transport: str = "grpc"): +def test_get_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2830,42 +3949,137 @@ def test_delete_view(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_view(request) + call.return_value = logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + response = client.get_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request = logging_config.GetViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" -def test_delete_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetViewRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - client.delete_view() + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + assert args[0] == logging_config.GetViewRequest( + name="name_value", + ) + + +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_view_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_rpc + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2874,40 +4088,50 @@ async def test_delete_view_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + response = await client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request = logging_config.GetViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_delete_view_async_from_dict(): - await test_delete_view_async(request_type=dict) +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) -def test_delete_view_field_headers(): +def test_get_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteViewRequest() + request = logging_config.GetViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = None - client.delete_view(request) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2923,21 +4147,23 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio -async def test_delete_view_field_headers_async(): +async def test_get_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteViewRequest() + request = logging_config.GetViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) + await client.get_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2955,11 +4181,11 @@ async def test_delete_view_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - logging_config.ListSinksRequest, + logging_config.CreateViewRequest, dict, ], ) -def test_list_sinks(request_type, transport: str = "grpc"): +def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2970,45 +4196,141 @@ def test_list_sinks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListSinksResponse( - next_page_token="next_page_token_value", + call.return_value = logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", ) - response = client.list_sinks(request) + response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request = logging_config.CreateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" -def test_list_sinks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
+def test_create_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - client.list_sinks() + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + assert args[0] == logging_config.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + +def test_create_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + request = {} + client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_sinks_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest +async def test_create_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_view + ] = mock_rpc + + request = {} + await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3017,45 +4339,50 @@ async def test_list_sinks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse( - next_page_token="next_page_token_value", + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", ) ) - response = await client.list_sinks(request) + response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request = logging_config.CreateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_list_sinks_async_from_dict(): - await test_list_sinks_async(request_type=dict) +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) -def test_list_sinks_field_headers(): +def test_create_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListSinksRequest() + request = logging_config.CreateViewRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - call.return_value = logging_config.ListSinksResponse() - client.list_sinks(request) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3071,23 +4398,23 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio -async def test_list_sinks_field_headers_async(): +async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.ListSinksRequest() + request = logging_config.CreateViewRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse() + logging_config.LogView() ) - await client.list_sinks(request) + await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3102,411 +4429,209 @@ async def test_list_sinks_field_headers_async(): ) in kw["metadata"] -def test_list_sinks_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateViewRequest, + dict, + ], +) +def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListSinksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_sinks( - parent="parent_value", + call.return_value = logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", ) + response = client.update_view(request) - # Establish that the underlying call was made with the expected - # request object values. 
+ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + request = logging_config.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" -def test_list_sinks_flattened_error(): +def test_update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_sinks( - logging_config.ListSinksRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_sinks_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateViewRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListSinksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse() + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_sinks( - parent="parent_value", + client.update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest( + name="name_value", ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -@pytest.mark.asyncio -async def test_list_sinks_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_sinks( - logging_config.ListSinksRequest(), - parent="parent_value", + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + request = {} + client.update_view(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_list_sinks_pager(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) + client.update_view(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods ) - pager = client.list_sinks(request={}) - assert pager._metadata == metadata + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_rpc - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogSink) for i in results) + request = {} + await client.update_view(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -def test_list_sinks_pages(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sinks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_sinks_async_pager(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sinks( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogSink) for i in responses) - - -@pytest.mark.asyncio -async def test_list_sinks_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - logging_config.LogSink(), - ], - next_page_token="abc", - ), - logging_config.ListSinksResponse( - sinks=[], - next_page_token="def", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - ], - next_page_token="ghi", - ), - logging_config.ListSinksResponse( - sinks=[ - logging_config.LogSink(), - logging_config.LogSink(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in ( - await client.list_sinks(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - logging_config.GetSinkRequest, - dict, - ], -) -def test_get_sink(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - bigquery_options=logging_config.BigQueryOptions( - use_partitioned_tables=True - ), - ) - response = client.get_sink(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True - - -def test_get_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - -@pytest.mark.asyncio -async def test_get_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest -): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( + logging_config.LogView( name="name_value", - destination="destination_value", - filter="filter_value", description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, + filter="filter_value", ) ) - response = await client.get_sink(request) + response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request = logging_config.UpdateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) + assert isinstance(response, logging_config.LogView) assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert response.filter == "filter_value" @pytest.mark.asyncio -async def test_get_sink_async_from_dict(): - await test_get_sink_async(request_type=dict) +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) -def test_get_sink_field_headers(): +def test_update_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.GetSinkRequest() + request = logging_config.UpdateViewRequest() - request.sink_name = "sink_name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - call.return_value = logging_config.LogSink() - client.get_sink(request) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3517,28 +4642,28 @@ def test_get_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_sink_field_headers_async(): +async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetSinkRequest() + request = logging_config.UpdateViewRequest() - request.sink_name = "sink_name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + logging_config.LogView() ) - await client.get_sink(request) + await client.update_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -3549,100 +4674,18 @@ async def test_get_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "name=name_value", ) in kw["metadata"] -def test_get_sink_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_sink( - sink_name="sink_name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" - assert arg == mock_val - - -def test_get_sink_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_sink( - logging_config.GetSinkRequest(), - sink_name="sink_name_value", - ) - - -@pytest.mark.asyncio -async def test_get_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_sink), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogSink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_sink( - sink_name="sink_name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_sink( - logging_config.GetSinkRequest(), - sink_name="sink_name_value", - ) - - @pytest.mark.parametrize( "request_type", [ - logging_config.CreateSinkRequest, + logging_config.DeleteViewRequest, dict, ], ) -def test_create_sink(request_type, transport: str = "grpc"): +def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3653,62 +4696,132 @@ def test_create_sink(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - bigquery_options=logging_config.BigQueryOptions( - use_partitioned_tables=True - ), - ) - response = client.create_sink(request) + call.return_value = None + response = client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request = logging_config.DeleteViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert response is None -def test_create_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.DeleteViewRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - client.create_sink() + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + assert args[0] == logging_config.DeleteViewRequest( + name="name_value", + ) + + +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_rpc + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3717,59 +4830,41 @@ async def test_create_sink_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - ) - ) - response = await client.create_sink(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request = logging_config.DeleteViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert response is None @pytest.mark.asyncio -async def test_create_sink_async_from_dict(): - await test_create_sink_async(request_type=dict) +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) -def test_create_sink_field_headers(): +def test_delete_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = logging_config.CreateSinkRequest() + request = logging_config.DeleteViewRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - call.return_value = logging_config.LogSink() - client.create_sink(request) + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3780,28 +4875,26 @@ def test_create_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_sink_field_headers_async(): +async def test_delete_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateSinkRequest() + request = logging_config.DeleteViewRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() - ) - await client.create_sink(request) + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -3812,176 +4905,157 @@ async def test_create_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_sink_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListSinksRequest, + dict, + ], +) +def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_sink( - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), + call.return_value = logging_config.ListSinksResponse( + next_page_token="next_page_token_value", ) + response = client.list_sinks(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val + request = logging_config.ListSinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSinksPager) + assert response.next_page_token == "next_page_token_value" -def test_create_sink_flattened_error(): +def test_list_sinks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_sink( - logging_config.CreateSinkRequest(), - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListSinksRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_sink( + client.list_sinks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest( parent="parent_value", - sink=logging_config.LogSink(name="name_value"), + page_token="page_token_value", ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val +def test_list_sinks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -@pytest.mark.asyncio -async def test_create_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_sink( - logging_config.CreateSinkRequest(), - parent="parent_value", - sink=logging_config.LogSink(name="name_value"), + # Ensure method has been cached + assert client._transport.list_sinks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) + client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc + request = {} + client.list_sinks(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateSinkRequest, - dict, - ], -) -def test_update_sink(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + client.list_sinks(request) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, - bigquery_options=logging_config.BigQueryOptions( - use_partitioned_tables=True - ), + +@pytest.mark.asyncio +async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - response = client.update_sink(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + # Ensure method has been cached + assert ( + client._client._transport.list_sinks + in client._client._transport._wrapped_methods + ) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sinks + ] = mock_rpc -def test_update_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + request = {} + await client.list_sinks(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: - client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +async def test_list_sinks_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3990,59 +5064,46 @@ async def test_update_sink_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink( - name="name_value", - destination="destination_value", - filter="filter_value", - description="description_value", - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity="writer_identity_value", - include_children=True, + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.update_sink(request) + response = await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + request = logging_config.ListSinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True + assert isinstance(response, pagers.ListSinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_sink_async_from_dict(): - await test_update_sink_async(request_type=dict) +async def test_list_sinks_async_from_dict(): + await test_list_sinks_async(request_type=dict) -def test_update_sink_field_headers(): +def test_list_sinks_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateSinkRequest() + request = logging_config.ListSinksRequest() - request.sink_name = "sink_name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: - call.return_value = logging_config.LogSink() - client.update_sink(request) + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4053,28 +5114,28 @@ def test_update_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_sink_field_headers_async(): +async def test_list_sinks_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateSinkRequest() + request = logging_config.ListSinksRequest() - request.sink_name = "sink_name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + logging_config.ListSinksResponse() ) - await client.update_sink(request) + await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4085,43 +5146,35 @@ async def test_update_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_sink_flattened(): +def test_list_sinks_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogSink() + call.return_value = logging_config.ListSinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_sink( - sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_sinks( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" - assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_sink_flattened_error(): +def test_list_sinks_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4129,75 +5182,1338 @@ def test_update_sink_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_sink( - logging_config.UpdateSinkRequest(), - sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_sinks( + logging_config.ListSinksRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_sink_flattened_async(): +async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogSink() + call.return_value = logging_config.ListSinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogSink() + logging_config.ListSinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_sink( - sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_sinks( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" - assert arg == mock_val - arg = args[0].sink - mock_val = logging_config.LogSink(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_sink_flattened_error_async(): +async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_sink( - logging_config.UpdateSinkRequest(), - sink_name="sink_name_value", - sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_sinks( + logging_config.ListSinksRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - logging_config.DeleteSinkRequest, - dict, - ], -) +def test_list_sinks_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sinks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogSink) for i in results) + + +def test_list_sinks_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the 
actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sinks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sinks_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sinks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogSink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sinks_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token="abc", + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sinks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSinkRequest, + dict, + ], +) +def test_get_sink(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + response = client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True + + +def test_get_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest( + sink_name="sink_name_value", + ) + + +def test_get_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc + request = {} + client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sink + ] = mock_rpc + + request = {} + await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_get_sink_async_from_dict(): + await test_get_sink_async(request_type=dict) + + +def test_get_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSinkRequest() + + request.sink_name = "sink_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSinkRequest() + + request.sink_name = "sink_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] + + +def test_get_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_sink( + sink_name="sink_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + + +def test_get_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sink( + logging_config.GetSinkRequest(), + sink_name="sink_name_value", + ) + + +@pytest.mark.asyncio +async def test_get_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_sink( + sink_name="sink_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_sink( + logging_config.GetSinkRequest(), + sink_name="sink_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateSinkRequest, + dict, + ], +) +def test_create_sink(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + response = client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True + + +def test_create_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateSinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest( + parent="parent_value", + ) + + +def test_create_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc + request = {} + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_sink + ] = mock_rpc + + request = {} + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_create_sink_async_from_dict(): + await test_create_sink_async(request_type=dict) + + +def test_create_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_sink( + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + + +def test_create_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_sink( + logging_config.CreateSinkRequest(), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_sink( + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_sink( + logging_config.CreateSinkRequest(), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSinkRequest, + dict, + ], +) +def test_update_sink(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + response = client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True + + +def test_update_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSinkRequest( + sink_name="sink_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest( + sink_name="sink_name_value", + ) + + +def test_update_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc + request = {} + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_sink + ] = mock_rpc + + request = {} + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + response = await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == "writer_identity_value" + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_update_sink_async_from_dict(): + await test_update_sink_async(request_type=dict) + + +def test_update_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = "sink_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = "sink_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] + + +def test_update_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_sink( + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_sink( + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteSinkRequest, + dict, + ], +) def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4209,42 +6525,4115 @@ def test_delete_sink(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + +def test_delete_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc + request = {} + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_sink + ] = mock_rpc + + request = {} + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_sink_async_from_dict(): + await test_delete_sink_async(request_type=dict) + + +def test_delete_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = "sink_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value = None + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = "sink_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] + + +def test_delete_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_sink( + sink_name="sink_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + + +def test_delete_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_sink( + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_sink( + sink_name="sink_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_sink( + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateLinkRequest, + dict, + ], +) +def test_create_link(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + +def test_create_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_link] = mock_rpc + request = {} + client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_link + ] = mock_rpc + + request = {} + await client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_link_async_from_dict(): + await test_create_link_async(request_type=dict) + + +def test_create_link_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_link_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_link_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_link( + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name="name_value") + assert arg == mock_val + arg = args[0].link_id + mock_val = "link_id_value" + assert arg == mock_val + + +def test_create_link_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_link( + logging_config.CreateLinkRequest(), + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_link_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_link( + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name="name_value") + assert arg == mock_val + arg = args[0].link_id + mock_val = "link_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_link_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_link( + logging_config.CreateLinkRequest(), + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteLinkRequest, + dict, + ], +) +def test_delete_link(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest( + name="name_value", + ) + + +def test_delete_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc + request = {} + client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_link + ] = mock_rpc + + request = {} + await client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_link_async_from_dict(): + await test_delete_link_async(request_type=dict) + + +def test_delete_link_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_link_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_link_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_link_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_link( + logging_config.DeleteLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_link_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_link_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_link( + logging_config.DeleteLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListLinksRequest, + dict, + ], +) +def test_list_links(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.ListLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_links in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_links] = mock_rpc + request = {} + client.list_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_links + ] = mock_rpc + + request = {} + await client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_links_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.ListLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_links_async_from_dict(): + await test_list_links_async(request_type=dict) + + +def test_list_links_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = logging_config.ListLinksResponse() + client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_links_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = logging_config.ListLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) + await client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_links_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_links_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_links( + logging_config.ListLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_links_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_links_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_links( + logging_config.ListLinksRequest(), + parent="parent_value", + ) + + +def test_list_links_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_links(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.Link) for i in results) + + +def test_list_links_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = list(client.list_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_links_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.Link) for i in responses) + + +@pytest.mark.asyncio +async def test_list_links_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token="abc", + ), + logging_config.ListLinksResponse( + links=[], + next_page_token="def", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token="ghi", + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetLinkRequest, + dict, + ], +) +def test_get_link(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + response = client.get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Link) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_get_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest( + name="name_value", + ) + + +def test_get_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_link] = mock_rpc + request = {} + client.get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_link + ] = mock_rpc + + request = {} + await client.get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) + response = await client.get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Link) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_link_async_from_dict(): + await test_get_link_async(request_type=dict) + + +def test_get_link_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value = logging_config.Link() + client.get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_link_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + await client.get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_link_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Link() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_link_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_link( + logging_config.GetLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_link_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Link() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_link_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_link( + logging_config.GetLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListExclusionsRequest, + dict, + ], +) +def test_list_exclusions(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.ListExclusionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExclusionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_exclusions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.ListExclusionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_exclusions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_exclusions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_exclusions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc + request = {} + client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_exclusions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_exclusions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_exclusions + ] = mock_rpc + + request = {} + await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_exclusions_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.ListExclusionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExclusionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_exclusions_async_from_dict(): + await test_list_exclusions_async(request_type=dict) + + +def test_list_exclusions_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListExclusionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_exclusions_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListExclusionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse() + ) + await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_exclusions_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_exclusions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_exclusions_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_exclusions_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_exclusions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_exclusions_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent="parent_value", + ) + + +def test_list_exclusions_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_exclusions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) for i in results) + + +def test_list_exclusions_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = list(client.list_exclusions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_exclusions_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_exclusions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogExclusion) for i in responses) + + +@pytest.mark.asyncio +async def test_list_exclusions_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token="abc", + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token="def", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_exclusions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetExclusionRequest, + dict, + ], +) +def test_get_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + response = client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +def test_get_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetExclusionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest( + name="name_value", + ) + + +def test_get_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc + request = {} + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_exclusion + ] = mock_rpc + + request = {} + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_get_exclusion_async_from_dict(): + await test_get_exclusion_async(request_type=dict) + + +def test_get_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_exclusion( + logging_config.GetExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_exclusion( + logging_config.GetExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateExclusionRequest, + dict, + ], +) +def test_create_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + response = client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +def test_create_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateExclusionRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest( + parent="parent_value", + ) + + +def test_create_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_exclusion + ] = mock_rpc + request = {} + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_exclusion + ] = mock_rpc + + request = {} + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_create_exclusion_async_from_dict(): + await test_create_exclusion_async(request_type=dict) + + +def test_create_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_exclusion( + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + + +def test_create_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_exclusion( + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateExclusionRequest, + dict, + ], +) +def test_update_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + response = client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +def test_update_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateExclusionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest( + name="name_value", + ) + + +def test_update_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_exclusion + ] = mock_rpc + request = {} + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_exclusion + ] = mock_rpc + + request = {} + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + response = await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_update_exclusion_async_from_dict(): + await test_update_exclusion_async(request_type=dict) + + +def test_update_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_exclusion( + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_exclusion( + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteExclusionRequest, + dict, + ], +) +def test_delete_exclusion(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteExclusionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest( + name="name_value", + ) + + +def test_delete_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_exclusion + ] = mock_rpc + request = {} + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_exclusion + ] = mock_rpc + + request = {} + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) + + +def test_delete_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = None + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_exclusion( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetCmekSettingsRequest, + dict, + ], +) +def test_get_cmek_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + response = client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" + + +def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetCmekSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest( + name="name_value", + ) + + +def test_get_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cmek_settings + ] = mock_rpc + request = {} + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cmek_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cmek_settings + ] = mock_rpc + + request = {} + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) + response = await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) + + +def test_get_cmek_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateCmekSettingsRequest, + dict, + ], +) +def test_update_cmek_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_sink(request) + call.return_value = logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + response = client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" -def test_delete_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateCmekSettingsRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - client.delete_sink() + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + assert args[0] == logging_config.UpdateCmekSettingsRequest( + name="name_value", + ) + + +def test_update_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cmek_settings in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cmek_settings + ] = mock_rpc + request = {} + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_sink_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +async def test_update_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cmek_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cmek_settings + ] = mock_rpc + + request = {} + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async( + transport: str = "grpc_asyncio", + request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4253,40 +10642,56 @@ async def test_delete_sink_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) + response = await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, logging_config.CmekSettings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio -async def test_delete_sink_async_from_dict(): - await test_delete_sink_async(request_type=dict) +async def test_update_cmek_settings_async_from_dict(): + await test_update_cmek_settings_async(request_type=dict) -def test_delete_sink_field_headers(): +def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteSinkRequest() + request = logging_config.UpdateCmekSettingsRequest() - request.sink_name = "sink_name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - call.return_value = None - client.delete_sink(request) + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4297,26 +10702,30 @@ def test_delete_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_sink_field_headers_async(): +async def test_update_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteSinkRequest() + request = logging_config.UpdateCmekSettingsRequest() - request.sink_name = "sink_name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink(request) + with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) + await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -4327,147 +10736,165 @@ async def test_delete_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name_value", + "name=name_value", ) in kw["metadata"] -def test_delete_sink_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSettingsRequest, + dict, + ], +) +def test_get_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_sink( - sink_name="sink_name_value", + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, ) + response = client.get_settings(request) - # Establish that the underlying call was made with the expected - # request object values. + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" - assert arg == mock_val + request = logging_config.GetSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True -def test_delete_sink_flattened_error(): +def test_get_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name="sink_name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_sink_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSettingsRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_sink( - sink_name="sink_name_value", + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) + client.get_settings(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].sink_name - mock_val = "sink_name_value" - assert arg == mock_val - + assert args[0] == logging_config.GetSettingsRequest( + name="name_value", + ) -@pytest.mark.asyncio -async def test_delete_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name="sink_name_value", +def test_get_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "request_type", - [ - logging_config.ListExclusionsRequest, - dict, - ], -) -def test_list_exclusions(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Ensure method has been cached + assert client._transport.get_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + request = {} + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + client.get_settings(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse( - next_page_token="next_page_token_value", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - response = client.list_exclusions(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == "next_page_token_value" + # Ensure method has been cached + assert ( + client._client._transport.get_settings + in client._client._transport._wrapped_methods + ) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_settings + ] = mock_rpc -def test_list_exclusions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + request = {} + await client.get_settings(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_exclusions_async( - transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +async def test_get_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4476,45 +10903,54 @@ async def test_list_exclusions_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse( - next_page_token="next_page_token_value", + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, ) ) - response = await client.list_exclusions(request) + response = await client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request = logging_config.GetSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True @pytest.mark.asyncio -async def test_list_exclusions_async_from_dict(): - await test_list_exclusions_async(request_type=dict) +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) -def test_list_exclusions_field_headers(): +def test_get_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.ListExclusionsRequest() + request = logging_config.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.get_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4525,28 +10961,28 @@ def test_list_exclusions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_exclusions_field_headers_async(): +async def test_get_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.GetSettingsRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse() + logging_config.Settings() ) - await client.list_exclusions(request) + await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -4557,288 +10993,100 @@ async def test_list_exclusions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_exclusions_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_exclusions( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_exclusions_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_exclusions( - logging_config.ListExclusionsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_exclusions_flattened_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListExclusionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListExclusionsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_exclusions( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_exclusions_flattened_error_async(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_exclusions( - logging_config.ListExclusionsRequest(), - parent="parent_value", - ) - - -def test_list_exclusions_pager(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_exclusions(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) for i in results) - - -def test_list_exclusions_pages(transport_name: str = "grpc"): +def test_get_settings_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_settings_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_settings( + logging_config.GetSettingsRequest(), + name="name_value", ) - pages = list(client.list_exclusions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_exclusions_async_pager(): +async def test_get_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() ) - async_pager = await client.list_exclusions( - request={}, + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_settings( + name="name_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) for i in responses) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_list_exclusions_async_pages(): +async def test_get_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token="abc", - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token="def", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token="ghi", - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_settings( + logging_config.GetSettingsRequest(), + name="name_value", ) - pages = [] - async for page_ in ( - await client.list_exclusions(request={}) - ).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - logging_config.GetExclusionRequest, + logging_config.UpdateSettingsRequest, dict, ], ) -def test_get_exclusion(request_type, transport: str = "grpc"): +def test_update_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4849,51 +11097,143 @@ def test_get_exclusion(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( + call.return_value = logging_config.Settings( name="name_value", - description="description_value", - filter="filter_value", - disabled=True, + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, ) - response = client.get_exclusion(request) + response = client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request = logging_config.UpdateSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.Settings) assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True -def test_get_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_update_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSettingsRequest( + name="name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - client.get_exclusion() + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.UpdateSettingsRequest( + name="name_value", + ) + + +def test_update_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + request = {} + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_settings + ] = mock_rpc + + request = {} + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4902,51 +11242,54 @@ async def test_get_exclusion_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( + logging_config.Settings( name="name_value", - description="description_value", - filter="filter_value", - disabled=True, + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, ) ) - response = await client.get_exclusion(request) + response = await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request = logging_config.UpdateSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.Settings) assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True @pytest.mark.asyncio -async def test_get_exclusion_async_from_dict(): - await test_get_exclusion_async(request_type=dict) +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) -def test_get_exclusion_field_headers(): +def test_update_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.UpdateSettingsRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: - call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.update_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4962,23 +11305,23 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio -async def test_get_exclusion_field_headers_async(): +async def test_update_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.UpdateSettingsRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() + logging_config.Settings() ) - await client.get_exclusion(request) + await client.update_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4993,31 +11336,35 @@ async def test_get_exclusion_field_headers_async(): ) in kw["metadata"] -def test_get_exclusion_flattened(): +def test_update_settings_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_exclusion( - name="name_value", + client.update_settings( + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = logging_config.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_exclusion_flattened_error(): +def test_update_settings_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5025,64 +11372,70 @@ def test_get_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), - name="name_value", + client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_exclusion_flattened_async(): +async def test_update_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.Settings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() + logging_config.Settings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_exclusion( - name="name_value", + response = await client.update_settings( + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].settings + mock_val = logging_config.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_exclusion_flattened_error_async(): +async def test_update_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_exclusion( - logging_config.GetExclusionRequest(), - name="name_value", + await client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - logging_config.CreateExclusionRequest, + logging_config.CopyLogEntriesRequest, dict, ], ) -def test_create_exclusion(request_type, transport: str = "grpc"): +def test_copy_log_entries(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5093,51 +11446,148 @@ def test_create_exclusion(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - response = client.create_exclusion(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.copy_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert isinstance(response, future.Future) -def test_create_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_copy_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CopyLogEntriesRequest( + name="name_value", + filter="filter_value", + destination="destination_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - client.create_exclusion() + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.copy_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.CopyLogEntriesRequest( + name="name_value", + filter="filter_value", + destination="destination_value", + ) + + +def test_copy_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.copy_log_entries + ] = mock_rpc + request = {} + client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_create_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +async def test_copy_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.copy_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.copy_log_entries + ] = mock_rpc + + request = {} + await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_copy_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5146,1701 +11596,1737 @@ async def test_create_exclusion_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.create_exclusion(request) + response = await client.copy_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_create_exclusion_async_from_dict(): - await test_create_exclusion_async(request_type=dict) +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) -def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - request.parent = "parent_value" + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport -@pytest.mark.asyncio -async def test_create_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient( +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ConfigServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() - request.parent = "parent_value" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_buckets_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() - ) - await client.create_exclusion(request) + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value = logging_config.ListBucketsResponse() + client.list_buckets(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.ListBucketsRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + assert args[0] == request_msg -def test_create_exclusion_flattened(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_exclusion( - parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.get_bucket(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. 
+ call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") - assert arg == mock_val + request_msg = logging_config.GetBucketRequest() + assert args[0] == request_msg -def test_create_exclusion_flattened_error(): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_async_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), - parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_bucket_async(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_create_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_exclusion( - parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), - ) + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_bucket_async(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") - assert arg == mock_val + request_msg = logging_config.UpdateBucketRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_create_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_exclusion( - logging_config.CreateExclusionRequest(), - parent="parent_value", - exclusion=logging_config.LogExclusion(name="name_value"), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.create_bucket(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateBucketRequest() -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateExclusionRequest, - dict, - ], -) -def test_update_exclusion(request_type, transport: str = "grpc"): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - response = client.update_exclusion(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request=None) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request_msg = logging_config.UpdateBucketRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert args[0] == request_msg -def test_update_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - client.update_exclusion() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value = None + client.delete_bucket(request=None) + + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request_msg = logging_config.DeleteBucketRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_update_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest -): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_undelete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion( - name="name_value", - description="description_value", - filter="filter_value", - disabled=True, - ) - ) - response = await client.update_exclusion(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value = None + client.undelete_bucket(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request_msg = logging_config.UndeleteBucketRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_update_exclusion_async_from_dict(): - await test_update_exclusion_async(request_type=dict) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_views_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request=None) -def test_update_exclusion_field_headers(): + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_view_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.get_view(request=None) - request.name = "name_value" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetViewRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) + assert args[0] == request_msg - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.create_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.CreateViewRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_update_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_view_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = logging_config.LogView() + client.update_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + - request.name = "name_value" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() - ) - await client.update_exclusion(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value = None + client.delete_view(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.DeleteViewRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -def test_update_exclusion_flattened(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sinks_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_exclusion( - name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + request_msg = logging_config.ListSinksRequest() + assert args[0] == request_msg -def test_update_exclusion_flattened_error(): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sink_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), - name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.get_sink(request=None) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSinkRequest() -@pytest.mark.asyncio -async def test_update_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sink_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.LogExclusion() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_exclusion( - name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.create_sink(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. 
+ call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + request_msg = logging_config.CreateSinkRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_update_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_sink_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_exclusion( - logging_config.UpdateExclusionRequest(), - name="name_value", - exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = logging_config.LogSink() + client.update_sink(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSinkRequest() -@pytest.mark.parametrize( - "request_type", - [ - logging_config.DeleteExclusionRequest, - dict, - ], -) -def test_delete_exclusion(request_type, transport: str = "grpc"): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_sink_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None - response = client.delete_exclusion(request) + client.delete_sink(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request_msg = logging_config.DeleteSinkRequest() - # Establish that the response is the type that we expect. - assert response is None + assert args[0] == request_msg -def test_delete_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_link_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - client.delete_exclusion() + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_link(request=None) + + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request_msg = logging_config.CreateLinkRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_delete_exclusion_async( - transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest -): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_link_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_link(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. 
+ call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - # Establish that the response is the type that we expect. - assert response is None + request_msg = logging_config.DeleteLinkRequest() - -@pytest.mark.asyncio -async def test_delete_exclusion_async_from_dict(): - await test_delete_exclusion_async(request_type=dict) + assert args[0] == request_msg -def test_delete_exclusion_field_headers(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_links_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - call.return_value = None - client.delete_exclusion(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = logging_config.ListLinksResponse() + client.list_links(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.ListLinksRequest() - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_delete_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_link_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value = logging_config.Link() + client.get_link(request=None) - request.name = "name_value" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetLinkRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) + assert args[0] == request_msg - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_exclusions_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.ListExclusionsRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -def test_delete_exclusion_flattened(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_exclusion_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_exclusion( - name="name_value", - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. 
+ call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request_msg = logging_config.GetExclusionRequest() + assert args[0] == request_msg -def test_delete_exclusion_flattened_error(): + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_exclusion_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name="name_value", - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() -@pytest.mark.asyncio -async def test_delete_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient( + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_exclusion( - name="name_value", - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request_msg = logging_config.UpdateExclusionRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_delete_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name="name_value", - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value = None + client.delete_exclusion(request=None) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteExclusionRequest() -@pytest.mark.parametrize( - "request_type", - [ - logging_config.GetCmekSettingsRequest, - dict, - ], -) -def test_get_cmek_settings(request_type, transport: str = "grpc"): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_cmek_settings_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_cmek_settings), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", - ) - response = client.get_cmek_settings(request) + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request_msg = logging_config.GetCmekSettingsRequest() - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + assert args[0] == request_msg -def test_get_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_cmek_settings_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" + type(client.transport.update_cmek_settings), "__call__" ) as call: - client.get_cmek_settings() + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request=None) + + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request_msg = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_get_cmek_settings_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest -): - client = ConfigServiceV2AsyncClient( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_settings_empty_call_grpc(): + client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", - ) - ) - response = await client.get_cmek_settings(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.get_settings(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request_msg = logging_config.GetSettingsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + assert args[0] == request_msg -@pytest.mark.asyncio -async def test_get_cmek_settings_async_from_dict(): - await test_get_cmek_settings_async(request_type=dict) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.update_settings(request=None) -def test_get_cmek_settings_field_headers(): + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_log_entries_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.copy_log_entries(request=None) - request.name = "name_value" + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: - call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) + assert args[0] == request_msg - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] +def test_transport_kind_grpc_asyncio(): + transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" -@pytest.mark.asyncio -async def test_get_cmek_settings_field_headers_async(): +def test_initialize_client_w_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) + assert client is not None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() - request.name = "name_value" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_buckets_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), "__call__" - ) as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings() + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) ) - await client.get_cmek_settings(request) + await client.list_buckets(request=None) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.ListBucketsRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateCmekSettingsRequest, - dict, - ], -) -def test_update_cmek_settings(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) ) - response = client.update_cmek_settings(request) + await client.get_bucket(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request_msg = logging_config.GetBucketRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + assert args[0] == request_msg -def test_update_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" + type(client.transport.create_bucket_async), "__call__" ) as call: - client.update_cmek_settings() + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_bucket_async(request=None) + + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request_msg = logging_config.CreateBucketRequest() + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_cmek_settings_async( - transport: str = "grpc_asyncio", - request_type=logging_config.UpdateCmekSettingsRequest, -): +async def test_update_bucket_async_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" + type(client.transport.update_bucket_async), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings( - name="name_value", - kms_key_name="kms_key_name_value", - service_account_id="service_account_id_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_cmek_settings(request) + await client.update_bucket_async(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request_msg = logging_config.UpdateBucketRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_cmek_settings_async_from_dict(): - await test_update_cmek_settings_async(request_type=dict) - - -def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), +async def test_create_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: - call.return_value = logging_config.CmekSettings() - client.update_cmek_settings(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) + await client.create_bucket(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.CreateBucketRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_cmek_settings_field_headers_async(): +async def test_update_bucket_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_cmek_settings), "__call__" - ) as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.CmekSettings() + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) ) - await client.update_cmek_settings(request) + await client.update_bucket(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.UpdateBucketRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -@pytest.mark.parametrize( - "request_type", - [ - logging_config.GetSettingsRequest, - dict, - ], -) -def test_get_settings(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, - ) - response = client.get_settings(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request_msg = logging_config.DeleteBucketRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True + assert args[0] == request_msg -def test_get_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_undelete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - client.get_settings() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request=None) + + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_settings_async( - transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest -): +async def test_list_views_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_settings(request) + await client.list_views(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request_msg = logging_config.ListViewsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_settings_async_from_dict(): - await test_get_settings_async(request_type=dict) - - -def test_get_settings_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), +async def test_get_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = logging_config.GetSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: - call.return_value = logging_config.Settings() - client.get_settings(request) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) + await client.get_view(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.GetViewRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_settings_field_headers_async(): +async def test_create_view_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.GetSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings() + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) ) - await client.get_settings(request) + await client.create_view(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.CreateViewRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -def test_get_settings_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_settings( - name="name_value", + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) ) + await client.update_view(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_settings_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) + request_msg = logging_config.UpdateViewRequest() - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_settings( - logging_config.GetSettingsRequest(), - name="name_value", - ) + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_settings_flattened_async(): +async def test_delete_view_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.Settings() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_settings( - name="name_value", - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request_msg = logging_config.DeleteViewRequest() + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_settings_flattened_error_async(): +async def test_list_sinks_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_settings( - logging_config.GetSettingsRequest(), - name="name_value", + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) ) + await client.list_sinks(request=None) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListSinksRequest() -@pytest.mark.parametrize( - "request_type", - [ - logging_config.UpdateSettingsRequest, - dict, - ], -) -def test_update_settings(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + assert args[0] == request_msg - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.Settings( - name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) ) - response = client.update_settings(request) + await client.get_sink(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request_msg = logging_config.GetSinkRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True + assert args[0] == request_msg -def test_update_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - client.update_settings() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) + await client.create_sink(request=None) + + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request_msg = logging_config.CreateSinkRequest() + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_settings_async( - transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest -): +async def test_update_sink_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings( + logging_config.LogSink( name="name_value", - kms_key_name="kms_key_name_value", - kms_service_account_id="kms_service_account_id_value", - storage_location="storage_location_value", - disable_default_sink=True, + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, ) ) - response = await client.update_settings(request) + await client.update_sink(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request_msg = logging_config.UpdateSinkRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.Settings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.kms_service_account_id == "kms_service_account_id_value" - assert response.storage_location == "storage_location_value" - assert response.disable_default_sink is True + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio -async def test_update_settings_async_from_dict(): - await test_update_settings_async(request_type=dict) +async def test_delete_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request=None) -def test_update_settings_field_headers(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteSinkRequest() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = logging_config.UpdateSettingsRequest() + assert args[0] == request_msg - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: - call.return_value = logging_config.Settings() - client.update_settings(request) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.CreateLinkRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_settings_field_headers_async(): +async def test_list_links_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = logging_config.UpdateSettingsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings() + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) ) - await client.update_settings(request) + await client.list_links(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + request_msg = logging_config.ListLinksRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == request_msg -def test_update_settings_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.Settings() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_settings( - settings=logging_config.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) ) + await client.get_link(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = logging_config.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + request_msg = logging_config.GetLinkRequest() + assert args[0] == request_msg -def test_update_settings_flattened_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_exclusions_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_settings( - logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) ) + await client.list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListExclusionsRequest() + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_settings_flattened_async(): +async def test_get_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.Settings() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_settings( - settings=logging_config.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) ) + await client.get_exclusion(request=None) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].settings - mock_val = logging_config.Settings(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + request_msg = logging_config.GetExclusionRequest() + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_settings_flattened_error_async(): +async def test_create_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_settings( - logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) ) + await client.create_exclusion(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() -@pytest.mark.parametrize( - "request_type", - [ - logging_config.CopyLogEntriesRequest, - dict, - ], -) -def test_copy_log_entries(request_type, transport: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + assert args[0] == request_msg - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.copy_log_entries(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) + await client.update_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + # Establish that the underlying stub method was called. + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request_msg = logging_config.UpdateExclusionRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert args[0] == request_msg -def test_copy_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: - client.copy_log_entries() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request=None) + + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request_msg = logging_config.DeleteExclusionRequest() + assert args[0] == request_msg + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_async( - transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest -): +async def test_get_cmek_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) ) - response = await client.copy_log_entries(request) + await client.get_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + # Establish that the underlying stub method was called. 
+ call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request_msg = logging_config.GetCmekSettingsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_copy_log_entries_async_from_dict(): - await test_copy_log_entries_async(request_type=dict) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +async def test_update_cmek_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, - transport=transport, + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) ) + await client.update_cmek_settings(request=None) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateCmekSettingsRequest() - # It is an error to provide scopes and a transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - with pytest.raises(ValueError): - client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) ) + await client.get_settings(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSettingsRequest() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ConfigServiceV2Client(transport=transport) - assert client.transport is transport + assert args[0] == request_msg -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - channel = transport.grpc_channel - assert channel - transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + await client.update_settings(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = ConfigServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_copy_log_entries_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.copy_log_entries(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -6878,6 +13364,8 @@ def test_config_service_v2_base_transport(): methods = ( "list_buckets", "get_bucket", + "create_bucket_async", + "update_bucket_async", "create_bucket", "update_bucket", "delete_bucket", @@ -6892,6 +13380,10 @@ def test_config_service_v2_base_transport(): "create_sink", "update_sink", "delete_sink", + "create_link", + "delete_link", + "list_links", + "get_link", "list_exclusions", "get_exclusion", "create_exclusion", @@ -6902,6 +13394,9 @@ def test_config_service_v2_base_transport(): "get_settings", "update_settings", "copy_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7296,316 +13791,770 @@ def test_config_service_v2_grpc_lro_async_client(): ) transport = client.transport - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cmek_settings_path(): + project = "squid" + expected = "projects/{project}/cmekSettings".format( + project=project, + ) + actual = ConfigServiceV2Client.cmek_settings_path(project) + assert expected == actual + + +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.cmek_settings_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual + + +def test_link_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + link = "nudibranch" + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) + ) + actual = ConfigServiceV2Client.link_path(project, location, bucket, link) + assert expected == actual + + +def test_parse_link_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "bucket": "winkle", + "link": "nautilus", + } + path = ConfigServiceV2Client.link_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_link_path(path) + assert expected == actual + + +def test_log_bucket_path(): + project = "scallop" + location = "abalone" + bucket = "squid" + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) + actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual + + +def test_parse_log_bucket_path(): + expected = { + "project": "clam", + "location": "whelk", + "bucket": "octopus", + } + path = ConfigServiceV2Client.log_bucket_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual + + +def test_log_exclusion_path(): + project = "oyster" + exclusion = "nudibranch" + expected = "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) + actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual + + +def test_parse_log_exclusion_path(): + expected = { + "project": "cuttlefish", + "exclusion": "mussel", + } + path = ConfigServiceV2Client.log_exclusion_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual + + +def test_log_sink_path(): + project = "winkle" + sink = "nautilus" + expected = "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) + actual = ConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual + + +def test_parse_log_sink_path(): + expected = { + "project": "scallop", + "sink": "abalone", + } + path = ConfigServiceV2Client.log_sink_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual + + +def test_log_view_path(): + project = "squid" + location = "clam" + bucket = "whelk" + view = "octopus" + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) + ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "bucket": "cuttlefish", + "view": "mussel", + } + path = ConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual + + +def test_settings_path(): + project = "winkle" + expected = "projects/{project}/settings".format( + project=project, + ) + actual = ConfigServiceV2Client.settings_path(project) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "project": "nautilus", + } + path = ConfigServiceV2Client.settings_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_settings_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) + actual = ConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = ConfigServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual -def test_cmek_settings_path(): - project = "squid" - expected = "projects/{project}/cmekSettings".format( - project=project, + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, ) - actual = ConfigServiceV2Client.cmek_settings_path(project) + actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual -def test_parse_cmek_settings_path(): +def test_parse_common_folder_path(): expected = { - "project": "clam", + "folder": "clam", } - path = ConfigServiceV2Client.cmek_settings_path(**expected) + path = ConfigServiceV2Client.common_folder_path(**expected) # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + actual = ConfigServiceV2Client.parse_common_folder_path(path) assert expected == actual -def test_log_bucket_path(): - project = "whelk" - location = "octopus" - bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( - project=project, - location=location, - bucket=bucket, +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, ) - actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual -def test_parse_log_bucket_path(): +def test_parse_common_organization_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "bucket": "mussel", + "organization": "octopus", } - path = ConfigServiceV2Client.log_bucket_path(**expected) + path = ConfigServiceV2Client.common_organization_path(**expected) # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_log_bucket_path(path) + actual = ConfigServiceV2Client.parse_common_organization_path(path) assert expected == actual -def test_log_exclusion_path(): - project = "winkle" - exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format( +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( project=project, - exclusion=exclusion, ) - actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual -def test_parse_log_exclusion_path(): +def test_parse_common_project_path(): expected = { - "project": "scallop", - "exclusion": "abalone", + "project": "nudibranch", } - path = ConfigServiceV2Client.log_exclusion_path(**expected) + path = ConfigServiceV2Client.common_project_path(**expected) # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + actual = ConfigServiceV2Client.parse_common_project_path(path) assert expected == actual -def test_log_sink_path(): - project = "squid" - sink = "clam" - expected = "projects/{project}/sinks/{sink}".format( +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( project=project, - sink=sink, + location=location, ) - actual = ConfigServiceV2Client.log_sink_path(project, sink) + actual = ConfigServiceV2Client.common_location_path(project, location) assert expected == actual -def test_parse_log_sink_path(): +def test_parse_common_location_path(): expected = { - "project": "whelk", - "sink": "octopus", + "project": "winkle", + "location": "nautilus", } - path = ConfigServiceV2Client.log_sink_path(**expected) + path = ConfigServiceV2Client.common_location_path(**expected) # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_log_sink_path(path) + actual = ConfigServiceV2Client.parse_common_location_path(path) assert expected == actual -def test_log_view_path(): - project = "oyster" - location = "nudibranch" - bucket = "cuttlefish" - view = "mussel" - expected = ( - "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( - project=project, - location=location, - bucket=bucket, - view=view, - ) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConfigServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_cancel_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_log_view_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "bucket": "scallop", - "view": "abalone", - } - path = ConfigServiceV2Client.log_view_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_log_view_path(path) - assert expected == actual + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_settings_path(): - project = "squid" - expected = "projects/{project}/settings".format( - project=project, + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), ) - actual = ConfigServiceV2Client.settings_path(project) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" -def test_parse_settings_path(): - expected = { - "project": "clam", - } - path = ConfigServiceV2Client.settings_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_settings_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, +def test_get_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.common_billing_account_path(billing_account) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ConfigServiceV2Client.common_billing_account_path(**expected) - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_common_billing_account_path(path) - assert expected == actual +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format( - folder=folder, +def test_list_operations(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = ConfigServiceV2Client.common_folder_path(folder) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ConfigServiceV2Client.common_folder_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. 
- actual = ConfigServiceV2Client.parse_common_folder_path(path) - assert expected == actual + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format( - organization=organization, +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - actual = ConfigServiceV2Client.common_organization_path(organization) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ConfigServiceV2Client.common_organization_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_organization_path(path) - assert expected == actual + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format( - project=project, +def test_list_operations_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = ConfigServiceV2Client.common_project_path(project) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ConfigServiceV2Client.common_project_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_project_path(path) - assert expected == actual + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), ) - actual = ConfigServiceV2Client.common_location_path(project, location) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ConfigServiceV2Client.common_location_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = ConfigServiceV2Client.parse_common_location_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() +def test_list_operations_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() - with mock.patch.object( - transports.ConfigServiceV2Transport, "_prep_wrapped_messages" - ) as prep: - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + response = client.list_operations( + request={ + "name": "locations", + } ) - prep.assert_called_once_with(client_info) + call.assert_called() - with mock.patch.object( - transports.ConfigServiceV2Transport, "_prep_wrapped_messages" - ) as prep: - transport_class = ConfigServiceV2Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() ) - prep.assert_called_once_with(client_info) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_transport_close_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" + type(getattr(client.transport, "_grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - def test_client_ctx(): transports = [ "grpc", @@ -7643,7 +14592,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py 
b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index d8bbd5b73..ef3833740 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -24,11 +24,20 @@ import grpc from grpc.experimental import aio +import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -36,6 +45,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( @@ -48,6 +58,7 @@ from google.cloud.logging_v2.types import logging from google.logging.type import http_request_pb2 # type: ignore from google.logging.type import log_severity_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -56,10 +67,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -71,6 +104,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -101,6 +145,237 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment 
variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LoggingServiceV2Client._get_client_cert_source(None, False) is None + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LoggingServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LoggingServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + LoggingServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, 
default_universe, "always" + ) + == api_override + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + LoggingServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, None) + == LoggingServiceV2Client._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -203,13 +478,13 @@ def test_logging_service_v2_client_get_transport_class(): ) @mock.patch.object( LoggingServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), ) @mock.patch.object( LoggingServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), ) def test_logging_service_v2_client_client_options( client_class, transport_class, transport_name @@ 
-251,7 +526,9 @@ def test_logging_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -281,15 +558,23 @@ def test_logging_service_v2_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -299,7 +584,9 @@ def test_logging_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -317,7 +604,9 @@ def test_logging_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + 
UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -358,13 +647,13 @@ def test_logging_service_v2_client_client_options( ) @mock.patch.object( LoggingServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), ) @mock.patch.object( LoggingServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LoggingServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_logging_service_v2_client_mtls_env_auto( @@ -387,7 +676,9 @@ def test_logging_service_v2_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -419,7 +710,9 @@ def test_logging_service_v2_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -453,7 +746,9 @@ def test_logging_service_v2_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -543,6 +838,115 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint 
== client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -568,7 +972,9 @@ def test_logging_service_v2_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -607,7 +1013,9 @@ def test_logging_service_v2_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -667,7 +1075,9 @@ def 
test_logging_service_v2_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -737,26 +1147,114 @@ def test_delete_log(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request = logging.DeleteLogRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None -def test_delete_log_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_delete_log_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.DeleteLogRequest( + log_name="log_name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: - client.delete_log() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_log(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + assert args[0] == logging.DeleteLogRequest( + log_name="log_name_value", + ) + + +def test_delete_log_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc + request = {} + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_log + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_log + ] = mock_rpc + + request = {} + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -764,7 +1262,7 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -781,7 +1279,8 @@ async def test_delete_log_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request = logging.DeleteLogRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -824,7 +1323,7 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -892,7 +1391,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -919,7 +1418,7 @@ async def test_delete_log_flattened_async(): @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -959,60 +1458,153 @@ def test_write_log_entries(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request = logging.WriteLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
+def test_write_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.WriteLogEntriesRequest( + log_name="log_name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.write_log_entries), "__call__" ) as call: - client.write_log_entries() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.write_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + assert args[0] == logging.WriteLogEntriesRequest( + log_name="log_name_value", + ) -@pytest.mark.asyncio -async def test_write_log_entries_async( - transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest -): - client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_write_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.WriteLogEntriesResponse() + # Ensure method has been cached + assert client._transport.write_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = await client.write_log_entries(request) + client._transport._wrapped_methods[ + client._transport.write_log_entries + ] = mock_rpc + request = {} + client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + assert mock_rpc.call_count == 1 - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging.WriteLogEntriesResponse) + client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_write_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.write_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.write_log_entries + ] = mock_rpc + + request = {} + await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_write_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest +): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) + response = await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging.WriteLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging.WriteLogEntriesResponse) @pytest.mark.asyncio @@ -1078,7 +1670,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1121,7 +1713,7 @@ async def test_write_log_entries_flattened_async(): @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1164,27 +1756,123 @@ def test_list_log_entries(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request = logging.ListLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogEntriesPager) assert response.next_page_token == "next_page_token_value" -def test_list_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogEntriesRequest( + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: - client.list_log_entries() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + assert args[0] == logging.ListLogEntriesRequest( + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + +def test_list_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_log_entries + ] = mock_rpc + request = {} + client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_log_entries + ] = mock_rpc + + request = {} + await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1192,7 +1880,7 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1213,7 +1901,8 @@ async def test_list_log_entries_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request = logging.ListLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesAsyncPager) @@ -1276,7 +1965,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1313,7 +2002,7 @@ async def test_list_log_entries_flattened_async(): @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1329,7 +2018,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1364,10 +2053,14 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - pager = client.list_log_entries(request={}) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_log_entries(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1376,7 +2069,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - 
credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1418,7 +2111,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1468,7 +2161,7 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1504,9 +2197,11 @@ async def test_list_log_entries_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_log_entries(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1542,29 +2237,124 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = "grp # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) assert response.next_page_token == "next_page_token_value" -def test_list_monitored_resource_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListMonitoredResourceDescriptorsRequest( + page_token="page_token_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), "__call__" ) as call: - client.list_monitored_resource_descriptors() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_monitored_resource_descriptors(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( + page_token="page_token_value", + ) + + +def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_monitored_resource_descriptors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_monitored_resource_descriptors + ] = mock_rpc + request = {} + client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_monitored_resource_descriptors + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_monitored_resource_descriptors + ] = mock_rpc + + request = {} + await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1573,7 +2363,7 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1596,7 +2386,8 @@ async def test_list_monitored_resource_descriptors_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) @@ -1610,7 +2401,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1647,10 +2438,16 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - pager = client.list_monitored_resource_descriptors(request={}) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_monitored_resource_descriptors( + request={}, retry=retry, timeout=timeout + ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1662,7 +2459,7 @@ def 
test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1706,7 +2503,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1761,7 +2558,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1799,9 +2596,11 @@ async def test_list_monitored_resource_descriptors_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_monitored_resource_descriptors(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1836,7 +2635,8 @@ def test_list_logs(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request = logging.ListLogsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) @@ -1844,20 +2644,109 @@ def test_list_logs(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_logs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_logs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogsRequest( + parent="parent_value", + page_token="page_token_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: - client.list_logs() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_logs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + assert args[0] == logging.ListLogsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_logs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_logs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc + request = {} + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_logs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_logs + ] = mock_rpc + + request = {} + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1865,7 +2754,7 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1887,7 +2776,8 @@ async def test_list_logs_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request = logging.ListLogsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsAsyncPager) @@ -1932,7 +2822,7 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2002,7 +2892,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2031,7 +2921,7 @@ async def test_list_logs_flattened_async(): @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2045,7 +2935,7 @@ async def test_list_logs_flattened_error_async(): def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2080,13 +2970,17 @@ def test_list_logs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_logs(request={}) + pager = client.list_logs(request={}, retry=retry, 
timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2095,7 +2989,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2137,7 +3031,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2187,7 +3081,7 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2223,9 +3117,11 @@ async def test_list_logs_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_logs(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2265,12 +3161,91 @@ def test_tail_log_entries(request_type, transport: str = "grpc"): assert isinstance(message, logging.TailLogEntriesResponse) +def test_tail_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.tail_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.tail_log_entries + ] = mock_rpc + request = [{}] + client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.tail_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.tail_log_entries + ] = mock_rpc + + request = [{}] + await client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2337,7 +3312,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -2394,60 +3369,321 @@ def test_transport_adc(transport_class): adc.assert_called_once() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = LoggingServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = LoggingServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_initialize_client_w_grpc(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - assert isinstance( - client.transport, - transports.LoggingServiceV2GrpcTransport, + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value = None + client.delete_log(request=None) -def test_logging_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + assert args[0] == request_msg -def test_logging_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "delete_log", - "write_log_entries", - "list_log_entries", - "list_monitored_resource_descriptors", - "list_logs", - "tail_log_entries", - ) - for method in methods: +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_write_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + call.return_value = logging.WriteLogEntriesResponse() + client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value = logging.ListLogEntriesResponse() + client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_monitored_resource_descriptors_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + call.return_value = logging.ListMonitoredResourceDescriptorsResponse() + client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_logs_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value = logging.ListLogsResponse() + client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_log_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_write_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) + await client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_logs_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, + ) + + +def test_logging_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_logging_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "delete_log", + "write_log_entries", + "list_log_entries", + "list_monitored_resource_descriptors", + "list_logs", + "tail_log_entries", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -2964,35 +4200,458 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +def test_cancel_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.CancelOperationRequest() + request.name = "locations" -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None - for transport, close_name in transports.items(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): @@ -3032,7 +4691,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 39a005c97..7c59a09f1 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -24,11 +24,20 @@ import grpc from grpc.experimental import aio +import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore from google.api import launch_stage_pb2 # type: ignore @@ -39,6 +48,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import ( @@ -48,16 +58,39 @@ from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. 
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -69,6 +102,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -99,6 +143,237 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MetricsServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MetricsServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MetricsServiceV2Client._get_client_cert_source(None, False) is None + assert ( + MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MetricsServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MetricsServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MetricsServiceV2Client, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + MetricsServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MetricsServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MetricsServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MetricsServiceV2Client._get_universe_domain(None, None) + == MetricsServiceV2Client._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MetricsServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + 
@pytest.mark.parametrize( "client_class,transport_name", [ @@ -216,13 +491,13 @@ def test_metrics_service_v2_client_get_transport_class(): ) @mock.patch.object( MetricsServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), ) @mock.patch.object( MetricsServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), ) def test_metrics_service_v2_client_client_options( client_class, transport_class, transport_name @@ -264,7 +539,9 @@ def test_metrics_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -294,15 +571,23 @@ def test_metrics_service_v2_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -312,7 +597,9 @@ def test_metrics_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -330,7 +617,9 @@ def test_metrics_service_v2_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -371,13 +660,13 @@ def test_metrics_service_v2_client_client_options( ) @mock.patch.object( MetricsServiceV2Client, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2Client), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), ) @mock.patch.object( MetricsServiceV2AsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(MetricsServiceV2AsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_metrics_service_v2_client_mtls_env_auto( @@ -400,7 +689,9 @@ def test_metrics_service_v2_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = 
client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -432,7 +723,9 @@ def test_metrics_service_v2_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -466,7 +759,9 @@ def test_metrics_service_v2_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -556,6 +851,115 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) +def test_metrics_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -581,7 +985,9 @@ def test_metrics_service_v2_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -620,7 +1026,9 @@ def test_metrics_service_v2_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -680,7 +1088,9 @@ def test_metrics_service_v2_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -752,27 +1162,121 @@ def test_list_log_metrics(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogMetricsPager) assert response.next_page_token == "next_page_token_value" -def test_list_log_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_log_metrics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.ListLogMetricsRequest( + parent="parent_value", + page_token="page_token_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: - client.list_log_metrics() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_log_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + assert args[0] == logging_metrics.ListLogMetricsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_log_metrics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_metrics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_log_metrics + ] = mock_rpc + request = {} + client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_log_metrics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_log_metrics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_log_metrics + ] = mock_rpc + + request = {} + await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -780,7 +1284,7 @@ async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -801,7 +1305,8 @@ async def test_list_log_metrics_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) @@ -845,7 +1350,7 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -915,7 +1420,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -944,7 +1449,7 @@ async def test_list_log_metrics_flattened_async(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -958,7 +1463,7 @@ async def test_list_log_metrics_flattened_error_async(): def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -993,13 +1498,17 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_log_metrics(request={}) + pager = client.list_log_metrics(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1008,7 +1517,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1050,7 +1559,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1100,7 +1609,7 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1136,9 +1645,11 @@ async def test_list_log_metrics_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_log_metrics(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1168,6 +1679,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1177,44 +1689,135 @@ def test_get_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_get_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.GetLogMetricRequest( + metric_name="metric_name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: - client.get_log_metric() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + assert args[0] == logging_metrics.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + +def test_get_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc + request = {} + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_log_metric_async( - transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest +async def test_get_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_log_metric + ] = mock_rpc + + request = {} + await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest +): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() @@ -1226,6 +1829,7 @@ async def test_get_log_metric_async( name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1236,13 +1840,15 @@ async def test_get_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1285,7 +1891,7 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1355,7 +1961,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1384,7 +1990,7 @@ async def test_get_log_metric_flattened_async(): @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1422,6 +2028,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1431,34 +2038,127 @@ def test_create_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.CreateLogMetricRequest( + parent="parent_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_log_metric), "__call__" ) as call: - client.create_log_metric() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + assert args[0] == logging_metrics.CreateLogMetricRequest( + parent="parent_value", + ) + + +def test_create_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_log_metric + ] = mock_rpc + request = {} + client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_log_metric + ] = mock_rpc + + request = {} + await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1466,7 +2166,7 @@ async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1484,6 +2184,7 @@ async def test_create_log_metric_async( name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1494,13 +2195,15 @@ async def test_create_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1545,7 +2248,7 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1624,7 +2327,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1659,7 +2362,7 @@ async def test_create_log_metric_flattened_async(): @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1698,6 +2401,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1707,34 +2411,127 @@ def test_update_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_update_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.UpdateLogMetricRequest( + metric_name="metric_name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_log_metric), "__call__" ) as call: - client.update_log_metric() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + assert args[0] == logging_metrics.UpdateLogMetricRequest( + metric_name="metric_name_value", + ) + + +def test_update_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_log_metric + ] = mock_rpc + request = {} + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_log_metric + ] = mock_rpc + + request = {} + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1742,7 +2539,7 @@ async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1760,6 +2557,7 @@ async def test_update_log_metric_async( name="name_value", description="description_value", filter="filter_value", + bucket_name="bucket_name_value", disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, @@ -1770,13 +2568,15 @@ async def test_update_log_metric_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1821,7 +2621,7 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1900,7 +2700,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1935,7 +2735,7 @@ async def test_update_log_metric_flattened_async(): @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1976,28 +2776,120 @@ def test_delete_log_metric(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None -def test_delete_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_delete_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_log_metric), "__call__" ) as call: - client.delete_log_metric() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + assert args[0] == logging_metrics.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + +def test_delete_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_log_metric + ] = mock_rpc + request = {} + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_log_metric + ] = mock_rpc + + request = {} + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2005,7 +2897,7 @@ async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2024,7 +2916,8 @@ async def test_delete_log_metric_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2069,7 +2962,7 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2141,7 +3034,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2170,7 +3063,7 @@ async def test_delete_log_metric_flattened_async(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2216,7 +3109,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -2273,102 +3166,386 @@ def test_transport_adc(transport_class): adc.assert_called_once() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = MetricsServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = MetricsServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_initialize_client_w_grpc(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - assert isinstance( - client.transport, - transports.MetricsServiceV2GrpcTransport, + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_metrics_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client.list_log_metrics(request=None) -def test_metrics_service_v2_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + assert args[0] == request_msg -def test_metrics_service_v2_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_log_metrics", - "get_log_metric", - "create_log_metric", - "update_log_metric", - "delete_log_metric", +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value = logging_metrics.LogMetric() + client.get_log_metric(request=None) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + assert args[0] == request_msg -def test_metrics_service_v2_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/logging.admin", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - ), - quota_project_id="octopus", - ) + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value = logging_metrics.LogMetric() + client.create_log_metric(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() -def test_metrics_service_v2_base_transport_with_adc(): + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value = logging_metrics.LogMetric() + client.update_log_metric(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + call.return_value = None + client.delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = MetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_metrics_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + await client.get_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + await client.create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) + await client.update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, + ) + + +def test_metrics_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_metrics_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_log_metrics", + "get_log_metric", + "create_log_metric", + "update_log_metric", + "delete_log_metric", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metrics_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" @@ -2842,35 +4019,458 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = MetricsServiceV2AsyncClient( +def test_cancel_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", + transport=transport, ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - for transport, close_name in transports.items(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): @@ -2910,7 +4510,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/tests/unit/handlers/__init__.py b/tests/unit/handlers/__init__.py index df379f1e9..32eba185f 100644 --- a/tests/unit/handlers/__init__.py +++ b/tests/unit/handlers/__init__.py @@ -11,3 +11,44 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + + +# Utility functions to setup mock OpenTelemetry spans, needed by multiple test +# suites. 
+ +import contextlib + +import opentelemetry.context +import opentelemetry.trace + +from opentelemetry.trace import NonRecordingSpan +from opentelemetry.trace.span import TraceFlags + +_OTEL_SPAN_CONTEXT_TRACE_ID = 0x123456789123456789 +_OTEL_SPAN_CONTEXT_SPAN_ID = 0x123456789 +_OTEL_SPAN_CONTEXT_TRACEFLAGS = TraceFlags(TraceFlags.SAMPLED) + +_EXPECTED_OTEL_TRACE_ID = "00000000000000123456789123456789" +_EXPECTED_OTEL_SPAN_ID = "0000000123456789" +_EXPECTED_OTEL_TRACESAMPLED = True + + +@contextlib.contextmanager +def _setup_otel_span_context(): + """Sets up a nonrecording OpenTelemetry span with a mock span context that gets returned + by opentelemetry.trace.get_current_span, and returns it as a contextmanager + """ + span_context = opentelemetry.trace.SpanContext( + _OTEL_SPAN_CONTEXT_TRACE_ID, + _OTEL_SPAN_CONTEXT_SPAN_ID, + False, + trace_flags=_OTEL_SPAN_CONTEXT_TRACEFLAGS, + ) + ctx = opentelemetry.trace.set_span_in_context(NonRecordingSpan(span_context)) + tracer = opentelemetry.trace.NoOpTracer() + token = opentelemetry.context.attach(ctx) + try: + with tracer.start_as_current_span("test-span", context=ctx): + yield + finally: + opentelemetry.context.detach(token) diff --git a/tests/unit/handlers/test__helpers.py b/tests/unit/handlers/test__helpers.py index 6a7ff245f..d0577cf22 100644 --- a/tests/unit/handlers/test__helpers.py +++ b/tests/unit/handlers/test__helpers.py @@ -16,11 +16,22 @@ import mock +from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, +) + _FLASK_TRACE_ID = "flask0id" _FLASK_SPAN_ID = "span0flask" +_FLASK_SPAN_ID_XCTC_DEC = "12345" +_FLASK_SPAN_ID_XCTC_HEX = "3039".zfill(16) _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} _DJANGO_TRACE_ID = "django0id" _DJANGO_SPAN_ID = "span0django" +_DJANGO_SPAN_ID_XCTC_DEC = "54321" +_DJANGO_SPAN_ID_XCTC_HEX = "d431".zfill(16) _DJANGO_HTTP_REQUEST = {"requestUrl": 
"https://www.djangoproject.com/"} @@ -57,8 +68,9 @@ def test_no_context_header(self): def test_xcloud_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" expected_trace_id = _FLASK_TRACE_ID - expected_span_id = _FLASK_SPAN_ID - flask_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" + input_span_id = _FLASK_SPAN_ID_XCTC_DEC + expected_span_id = _FLASK_SPAN_ID_XCTC_HEX + flask_trace_id = f"{expected_trace_id}/{input_span_id};o=1" app = self.create_app() context = app.test_request_context( @@ -166,9 +178,10 @@ def test_xcloud_header(self): from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" - expected_span_id = _DJANGO_SPAN_ID + input_span_id = _DJANGO_SPAN_ID_XCTC_DEC + expected_span_id = _DJANGO_SPAN_ID_XCTC_HEX expected_trace_id = _DJANGO_TRACE_ID - django_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" + django_trace_id = f"{expected_trace_id}/{input_span_id};o=1" django_request = RequestFactory().get( "/", **{django_trace_header: django_trace_id} @@ -242,6 +255,19 @@ def test_http_request_sparse(self): self.assertEqual(http_request["requestUrl"], expected_path) self.assertEqual(http_request["protocol"], "HTTP/1.1") + def test_invalid_host_header(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + invalid_http_host = "testserver%7d" + django_request = RequestFactory().put("/", HTTP_HOST=invalid_http_host) + middleware = request.RequestMiddleware(None) + middleware(django_request) + http_request, *_ = self._call_fut() + self.assertEqual(http_request["requestMethod"], "PUT") + self.assertIsNone(http_request["requestUrl"]) + self.assertEqual(http_request["protocol"], "HTTP/1.1") + class Test_get_request_data(unittest.TestCase): @staticmethod @@ -343,6 +369,120 @@ def test_wo_libraries(self): output = self._call_fut() self.assertEqual(output, (None, None, None, False)) + def test_otel_span_exists_no_request(self): + 
flask_expected = (None, None, None, False) + django_expected = (None, None, None, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual( + output, + ( + None, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_otel_span_exists_django_request(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual( + output, + ( + _DJANGO_HTTP_REQUEST, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_otel_span_exists_flask_request(self): + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual( + output, + ( + _FLASK_HTTP_REQUEST, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_otel_span_exists_both_django_and_flask(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + + with _setup_otel_span_context(): + _, _, output = self._helper(django_expected, flask_expected) + + # Django wins + self.assertEqual( + output, + ( + _DJANGO_HTTP_REQUEST, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ), + ) + + def test_no_otel_span_no_requests(self): + flask_expected = (None, None, None, False) + django_expected = (None, None, None, False) + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, (None, None, None, False)) + + def 
test_no_otel_span_django_request(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) + _, _, output = self._helper(django_expected, flask_expected) + self.assertEqual(output, django_expected) + + def test_no_otel_span_flask_request(self): + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + _, _, output = self._helper(django_expected, flask_expected) + + # Django wins + self.assertEqual(output, flask_expected) + + def test_no_otel_span_both_django_and_flask(self): + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) + _, _, output = self._helper(django_expected, flask_expected) + + # Django wins + self.assertEqual(output, django_expected) + class Test__parse_xcloud_trace(unittest.TestCase): @staticmethod @@ -367,25 +507,40 @@ def test_no_span(self): self.assertEqual(sampled, False) def test_no_trace(self): - header = "/12345" + input_span = "12345" + expected_span = "3039".zfill(16) + header = f"/{input_span}" trace_id, span_id, sampled = self._call_fut(header) self.assertIsNone(trace_id) - self.assertEqual(span_id, "12345") + self.assertEqual(span_id, expected_span) self.assertEqual(sampled, False) def test_with_span(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span}" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span}" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) self.assertEqual(sampled, False) + def test_with_span_decimal_not_in_bounds(self): + input_spans = ["0", "9" * 100] + + for input_span in input_spans: + expected_trace = "12345" + header = 
f"{expected_trace}/{input_span}" + trace_id, span_id, sampled = self._call_fut(header) + self.assertEqual(trace_id, expected_trace) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) + def test_with_extra_characters(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span};abc" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span};abc" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) @@ -393,8 +548,9 @@ def test_with_extra_characters(self): def test_with_explicit_no_sampled(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span};o=0" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span};o=0" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) @@ -402,8 +558,9 @@ def test_with_explicit_no_sampled(self): def test_with__sampled(self): expected_trace = "12345" - expected_span = "67890" - header = f"{expected_trace}/{expected_span};o=1" + input_span = "67890" + expected_span = "10932".zfill(16) + header = f"{expected_trace}/{input_span};o=1" trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) @@ -464,3 +621,25 @@ def test_invalid_headers(self): self.assertIsNone(trace_id) self.assertIsNone(span_id) self.assertEqual(sampled, False) + + +class Test__parse_open_telemetry_data(unittest.TestCase): + @staticmethod + def _call_fut(): + from google.cloud.logging_v2.handlers import _helpers + + trace, span, sampled = _helpers._retrieve_current_open_telemetry_span() + return trace, span, sampled + + def test_no_op(self): + trace_id, span_id, sampled = self._call_fut() + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + 
self.assertEqual(sampled, False) + + def test_span_exists(self): + with _setup_otel_span_context(): + trace_id, span_id, sampled = self._call_fut() + self.assertEqual(trace_id, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(span_id, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(sampled, _EXPECTED_OTEL_TRACESAMPLED) diff --git a/tests/unit/handlers/test__monitored_resources.py b/tests/unit/handlers/test__monitored_resources.py index 3c62cba88..28f064b7b 100644 --- a/tests/unit/handlers/test__monitored_resources.py +++ b/tests/unit/handlers/test__monitored_resources.py @@ -12,37 +12,30 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest import unittest +import logging import mock import os import functools -from google.cloud.logging_v2.handlers._monitored_resources import ( - _create_functions_resource, -) from google.cloud.logging_v2.handlers._monitored_resources import ( _create_app_engine_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_functions_resource, _create_kubernetes_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( - _create_cloud_run_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_cloud_run_service_resource, + _create_cloud_run_job_resource, _create_compute_resource, -) -from google.cloud.logging_v2.handlers._monitored_resources import ( _create_global_resource, + detect_resource, + add_resource_labels, ) -from google.cloud.logging_v2.handlers._monitored_resources import detect_resource from google.cloud.logging_v2.handlers import _monitored_resources from google.cloud.logging_v2.resource import Resource class Test_Create_Resources(unittest.TestCase): - PROJECT = "test-project" LOCATION = "test-location" NAME = "test-name" @@ -54,6 +47,7 @@ def _mock_metadata(self, endpoint): if ( endpoint == _monitored_resources._ZONE_ID or endpoint == _monitored_resources._REGION_ID + or 
endpoint == _monitored_resources._GKE_CLUSTER_LOCATION ): return self.LOCATION elif ( @@ -132,7 +126,6 @@ def test_functions_resource_no_name(self): self.assertEqual(func_resource.labels["function_name"], "") def test_create_kubernetes_resource(self): - patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", wraps=self._mock_metadata, @@ -160,7 +153,7 @@ def test_compute_resource(self): self.assertEqual(resource.labels["instance_id"], self.NAME) self.assertEqual(resource.labels["zone"], self.LOCATION) - def test_cloud_run_resource(self): + def test_cloud_run_service_resource(self): patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", wraps=self._mock_metadata, @@ -169,7 +162,7 @@ def test_cloud_run_resource(self): os.environ[_monitored_resources._CLOUD_RUN_REVISION_ID] = self.VERSION os.environ[_monitored_resources._CLOUD_RUN_CONFIGURATION_ID] = self.CONFIG with patch: - resource = _create_cloud_run_resource() + resource = _create_cloud_run_service_resource() self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_run_revision") self.assertEqual(resource.labels["project_id"], self.PROJECT) @@ -178,6 +171,23 @@ def test_cloud_run_resource(self): self.assertEqual(resource.labels["configuration_name"], self.CONFIG) self.assertEqual(resource.labels["location"], self.LOCATION) + def test_cloud_run_job_resource(self): + patch = mock.patch( + "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", + wraps=self._mock_metadata, + ) + os.environ[_monitored_resources._CLOUD_RUN_JOB_ID] = self.NAME + os.environ[_monitored_resources._CLOUD_RUN_EXECUTION_ID] = self.VERSION + os.environ[_monitored_resources._CLOUD_RUN_TASK_INDEX] = self.CONFIG + os.environ[_monitored_resources._CLOUD_RUN_TASK_ATTEMPT] = self.CLUSTER + with patch: + resource = _create_cloud_run_job_resource() + self.assertIsInstance(resource, Resource) + 
self.assertEqual(resource.type, "cloud_run_job") + self.assertEqual(resource.labels["project_id"], self.PROJECT) + self.assertEqual(resource.labels["job_name"], self.NAME) + self.assertEqual(resource.labels["location"], self.LOCATION) + def test_app_engine_resource(self): patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", @@ -214,7 +224,8 @@ def test_with_no_project_from_server(self): resource_fns = [ _global_resource_patched, _create_app_engine_resource, - _create_cloud_run_resource, + _create_cloud_run_service_resource, + _create_cloud_run_job_resource, _create_compute_resource, _create_kubernetes_resource, _create_functions_resource, @@ -225,7 +236,6 @@ def test_with_no_project_from_server(self): class Test_Resource_Detection(unittest.TestCase): - PROJECT = "test-project" def _mock_k8s_metadata(self, endpoint): @@ -285,13 +295,20 @@ def test_detect_legacy_functions(self): self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_function") - def test_detect_cloud_run(self): - for env in _monitored_resources._CLOUD_RUN_ENV_VARS: + def test_detect_cloud_run_service(self): + for env in _monitored_resources._CLOUD_RUN_SERVICE_ENV_VARS: os.environ[env] = "TRUE" resource = detect_resource(self.PROJECT) self.assertIsInstance(resource, Resource) self.assertEqual(resource.type, "cloud_run_revision") + def test_detect_cloud_run_job(self): + for env in _monitored_resources._CLOUD_RUN_JOB_ENV_VARS: + os.environ[env] = "TRUE" + resource = detect_resource(self.PROJECT) + self.assertIsInstance(resource, Resource) + self.assertEqual(resource.type, "cloud_run_job") + def test_detect_compute_engine(self): patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", @@ -327,3 +344,45 @@ def test_detect_partial_data(self): # project id not returned from metadata serve # should be empty string self.assertEqual(resource.labels["project_id"], "") + + 
+@pytest.mark.parametrize( + "resource_type,os_environ,record_attrs,expected_labels", + [ + ( + _monitored_resources._GAE_RESOURCE_TYPE, + {}, + {"_trace": "trace_id"}, + {_monitored_resources._GAE_TRACE_ID_LABEL: "trace_id"}, + ), + ( + _monitored_resources._CLOUD_RUN_JOB_RESOURCE_TYPE, + { + _monitored_resources._CLOUD_RUN_EXECUTION_ID: "test_job_12345", + _monitored_resources._CLOUD_RUN_TASK_INDEX: "1", + _monitored_resources._CLOUD_RUN_TASK_ATTEMPT: "12", + }, + {}, + { + _monitored_resources._CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL: "test_job_12345", + _monitored_resources._CLOUD_RUN_JOBS_TASK_INDEX_LABEL: "1", + _monitored_resources._CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL: "12", + }, + ), + ("global", {}, {}, {}), + ], +) +def test_add_resource_labels(resource_type, os_environ, record_attrs, expected_labels): + os.environ.clear() + record = logging.LogRecord("logname", None, None, None, "test", None, None) + + resource = Resource(type=resource_type, labels={}) + + for attr, val in record_attrs.items(): + setattr(record, attr, val) + + os.environ.update(os_environ) + + labels = add_resource_labels(resource, record) + + assert expected_labels == labels diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py index 8eedfad9b..38d607e99 100644 --- a/tests/unit/handlers/test_app_engine.py +++ b/tests/unit/handlers/test_app_engine.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import pytest import unittest import mock @@ -46,6 +47,9 @@ def test_constructor_w_gae_standard_env(self): ), mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value=self.PROJECT, + ), pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. 
Use CloudLoggingHandler instead", ): handler = self._make_one(client, transport=_Transport) @@ -78,6 +82,9 @@ def test_constructor_w_gae_flex_env(self): ), mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value=self.PROJECT, + ), pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", ): handler = self._make_one( client, name=name, transport=_Transport, stream=stream @@ -99,7 +106,10 @@ def test_emit(self): "google.cloud.logging_v2.handlers.app_engine.get_request_data", return_value=(expected_http_request, trace_id, None, None), ) - with get_request_patch: + with get_request_patch, pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): # library integrations mocked to return test data client = mock.Mock(project=self.PROJECT, spec=["project"]) handler = self._make_one(client, transport=_Transport) @@ -137,7 +147,10 @@ def test_emit_manual_field_override(self): "google.cloud.logging_v2.handlers.app_engine.get_request_data", return_value=(inferred_http_request, inferred_trace_id, None, None), ) - with get_request_patch: + with get_request_patch, pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. 
Use CloudLoggingHandler instead", + ): # library integrations mocked to return test data client = mock.Mock(project=self.PROJECT, spec=["project"]) handler = self._make_one(client, transport=_Transport) @@ -153,7 +166,7 @@ def test_emit_manual_field_override(self): setattr(record, "trace", expected_trace) expected_span = "456" setattr(record, "span_id", expected_span) - expected_http = {"reuqest_url": "manual"} + expected_http = {"request_url": "manual"} setattr(record, "http_request", expected_http) expected_resource = Resource(type="test", labels={}) setattr(record, "resource", expected_resource) @@ -197,12 +210,20 @@ def test_get_gae_labels_with_label(self): from google.cloud.logging_v2.handlers import app_engine trace_id = "test-gae-trace-id" - gae_labels = self._get_gae_labels_helper(trace_id) + with pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): + gae_labels = self._get_gae_labels_helper(trace_id) expected_labels = {app_engine._TRACE_ID_LABEL: trace_id} self.assertEqual(gae_labels, expected_labels) def test_get_gae_labels_without_label(self): - gae_labels = self._get_gae_labels_helper(None) + with pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): + gae_labels = self._get_gae_labels_helper(None) self.assertEqual(gae_labels, {}) diff --git a/tests/unit/handlers/test_container_engine.py b/tests/unit/handlers/test_container_engine.py index 280ab9cf0..5c814c53d 100644 --- a/tests/unit/handlers/test_container_engine.py +++ b/tests/unit/handlers/test_container_engine.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest import unittest @@ -27,18 +28,30 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - handler = self._make_one() + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one() self.assertIsNone(handler.name) def test_ctor_w_name(self): - handler = self._make_one(name="foo") + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one(name="foo") self.assertEqual(handler.name, "foo") def test_format(self): import logging import json - handler = self._make_one() + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one() logname = "loggername" message = "hello world,嗨 世界" record = logging.LogRecord( @@ -51,6 +64,10 @@ def test_format(self): "thread": record.thread, "severity": record.levelname, } - payload = handler.format(record) + with pytest.warns( + DeprecationWarning, + match="format_stackdriver_json is deprecated. 
Use StructuredLogHandler instead", + ): + payload = handler.format(record) self.assertEqual(payload, json.dumps(expected_payload, ensure_ascii=False)) diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 1e431f1aa..3f25929e2 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -18,14 +18,25 @@ import mock import json +from google.cloud.logging_v2.handlers.handlers import ( + _INTERNAL_LOGGERS, + EXCLUDED_LOGGER_DEFAULTS, +) + from google.cloud.logging_v2.handlers._monitored_resources import ( _FUNCTION_ENV_VARS, _GAE_ENV_VARS, ) +from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, +) -class TestCloudLoggingFilter(unittest.TestCase): +class TestCloudLoggingFilter(unittest.TestCase): PROJECT = "PROJECT" @staticmethod @@ -129,7 +140,7 @@ def test_minimal_record(self): self.assertIsNone(record._labels) self.assertEqual(record._labels_str, "{}") - def test_record_with_request(self): + def test_record_with_xctc_request(self): """ test filter adds http request data when available """ @@ -150,8 +161,9 @@ def test_record_with_request(self): expected_path = "http://testserver/123" expected_agent = "Mozilla/5.0" expected_trace = "123" - expected_span = "456" - combined_trace = f"{expected_trace}/{expected_span};o=1" + input_span = "456" + expected_span = "1c8".zfill(16) + combined_trace = f"{expected_trace}/{input_span};o=1" expected_request = { "requestMethod": "GET", "requestUrl": expected_path, @@ -226,6 +238,136 @@ def test_record_with_traceparent_request(self): self.assertEqual(record._http_request, expected_request) self.assertEqual(record._http_request_str, json.dumps(expected_request)) + def test_record_with_opentelemetry_span_no_request(self): + filter_obj = self._make_one() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = 
None + + with _setup_otel_span_context(): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._trace_str, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._span_id, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._span_id_str, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._trace_sampled, _EXPECTED_OTEL_TRACESAMPLED) + self.assertEqual(record._trace_sampled_str, "true") + self.assertIsNone(record._http_request) + self.assertEqual(record._http_request_str, "{}") + + def test_record_with_opentelemetry_span_and_request(self): + filter_obj = self._make_one() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + http_path = "http://testserver/123" + http_agent = "Mozilla/5.0" + http_trace = "123" + http_span = "456" + combined_trace = f"{http_trace}/{http_span};o=1" + expected_request = { + "requestMethod": "GET", + "requestUrl": http_path, + "userAgent": http_agent, + "protocol": "HTTP/1.1", + } + + app = self.create_app() + with app.test_request_context( + http_path, + headers={ + "User-Agent": http_agent, + "X_CLOUD_TRACE_CONTEXT": combined_trace, + }, + ): + with _setup_otel_span_context(): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._trace_str, _EXPECTED_OTEL_TRACE_ID) + self.assertEqual(record._span_id, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._span_id_str, _EXPECTED_OTEL_SPAN_ID) + self.assertEqual(record._trace_sampled, _EXPECTED_OTEL_TRACESAMPLED) + self.assertEqual(record._trace_sampled_str, "true") + + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + + def test_record_with_opentelemetry_span_and_request_with_overrides(self): + """ + sort of does what the test after this one does, but 
more in the context of OTel precedence + """ + filter_obj = self._make_one() + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) + record.created = None + http_path = "http://testserver/123" + http_agent = "Mozilla/5.0" + http_trace = "123" + http_span = "456" + combined_trace = f"{http_trace}/{http_span};o=1" + expected_request = { + "requestMethod": "GET", + "requestUrl": http_path, + "userAgent": http_agent, + "protocol": "HTTP/1.1", + } + + overwritten_trace = "01234" + overwritten_span = "43210" + overwritten_tracesampled = False + record.trace = overwritten_trace + record.span_id = overwritten_span + record.trace_sampled = overwritten_tracesampled + + app = self.create_app() + with app.test_request_context( + http_path, + headers={ + "User-Agent": http_agent, + "X_CLOUD_TRACE_CONTEXT": combined_trace, + }, + ): + with _setup_otel_span_context(): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, overwritten_trace) + self.assertEqual(record._trace_str, overwritten_trace) + self.assertEqual(record._span_id, overwritten_span) + self.assertEqual(record._span_id_str, overwritten_span) + self.assertFalse(record._trace_sampled) + self.assertEqual( + record._trace_sampled_str, json.dumps(overwritten_tracesampled) + ) + + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + def test_user_overrides(self): """ ensure user can override fields @@ -291,7 +433,6 @@ def test_user_overrides(self): class TestCloudLoggingHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod @@ -320,6 +461,7 @@ def test_ctor_defaults(self): self.assertEqual(handler.name, DEFAULT_LOGGER_NAME) self.assertIs(handler.client, client) self.assertIsInstance(handler.transport, _Transport) + self.assertTrue(handler._transport_open) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, 
DEFAULT_LOGGER_NAME) global_resource = _create_global_resource(self.PROJECT) @@ -327,6 +469,17 @@ def test_ctor_defaults(self): self.assertIsNone(handler.labels) self.assertIs(handler.stream, sys.stderr) + def test_add_handler_to_client_handlers(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, + ) + self.assertIn(handler, client._handlers) + def test_ctor_explicit(self): import io from google.cloud.logging import Resource @@ -432,7 +585,7 @@ def test_emit_manual_field_override(self): setattr(record, "span_id", expected_span) expected_sampled = True setattr(record, "trace_sampled", expected_sampled) - expected_http = {"reuqest_url": "manual"} + expected_http = {"request_url": "manual"} setattr(record, "http_request", expected_http) expected_source = {"file": "test-file"} setattr(record, "source_location", expected_source) @@ -649,6 +802,56 @@ def test_emit_with_encoded_json(self): ), ) + def test_emit_after_close(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE + ) + logname = "loggername" + message = "hello world" + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + handler.handle(record) + old_transport = handler.transport + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + False, + None, + None, + ), + ) + + handler.close() + self.assertFalse(handler._transport_open) + + handler.handle(record) + self.assertTrue(handler._transport_open) + self.assertNotEqual(handler.transport, old_transport) + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + False, 
+ None, + None, + ), + ) + def test_format_with_arguments(self): """ Handler should support format string arguments @@ -684,6 +887,24 @@ def test_format_with_arguments(self): ), ) + def test_close(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, + ) + old_transport = handler.transport + handler.close() + self.assertFalse(handler._transport_open) + self.assertTrue(old_transport.close_called) + + # second call to close shouldn't throw an exception + handler.close() + self.assertFalse(handler._transport_open) + class TestFormatAndParseMessage(unittest.TestCase): def test_none(self): @@ -859,7 +1080,7 @@ def test_json_fields_input_unmodified(self): _format_and_parse_message(record, handler) # ensure json_fields has no side-effects self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) - for (key, value) in json_fields_orig.items(): + for key, value in json_fields_orig.items(): self.assertEqual( value, json_fields[key], f"expected_payload[{key}] != result[{key}]" ) @@ -869,7 +1090,7 @@ class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers import setup_logging - if excludes: + if excludes is not None: return setup_logging(handler, excluded_loggers=excludes) else: return setup_logging(handler) @@ -895,6 +1116,24 @@ def test_setup_logging_excludes(self): self.assertNotIn(handler, excluded_logger.handlers) self.assertFalse(excluded_logger.propagate) + def test_setup_logging_internal_loggers_no_excludes(self): + handler = _Handler(logging.INFO) + self._call_fut(handler, excludes=()) + + # Test that excluded logger defaults can be included, but internal + # loggers can't be. 
+ for logger_name in _INTERNAL_LOGGERS: + logger = logging.getLogger(logger_name) + self.assertNotIn(handler, logger.handlers) + self.assertFalse(logger.propagate) + + logger = logging.getLogger("logging") + self.assertTrue(logger.propagate) + + for logger_name in EXCLUDED_LOGGER_DEFAULTS: + logger = logging.getLogger(logger_name) + self.assertTrue(logger.propagate) + @patch.dict("os.environ", {envar: "1" for envar in _FUNCTION_ENV_VARS}) def test_remove_handlers_gcf(self): logger = logging.getLogger() @@ -941,10 +1180,18 @@ def test_keep_handlers_others(self): def setUp(self): self._handlers_cache = logging.getLogger().handlers[:] + # reset the logging manager every time so that we're not reusing loggers + # across different test cases. + self._logger_manager = logging.Logger.manager + logging.Logger.manager = logging.Manager(logging.Logger.root) + def tearDown(self): # cleanup handlers logging.getLogger().handlers = self._handlers_cache[:] + # restore the old logging manager. + logging.Logger.manager = self._logger_manager + class _Handler(object): def __init__(self, level): @@ -960,12 +1207,14 @@ def release(self): class _Client(object): def __init__(self, project): self.project = project + self._handlers = set() class _Transport(object): def __init__(self, client, name, resource=None): self.client = client self.name = name + self.close_called = False def send( self, @@ -990,3 +1239,6 @@ def send( http_request, source_location, ) + + def close(self): + self.close_called = True diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 353530ed1..908758749 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -86,7 +86,7 @@ def test_format(self): } handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) self.assertEqual( 
len(expected_payload.keys()), @@ -121,7 +121,7 @@ def test_format_minimal(self): handler.filter(record) result = json.loads(handler.format(record)) self.assertEqual(set(expected_payload.keys()), set(result.keys())) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" ) @@ -304,7 +304,7 @@ def test_format_with_reserved_json_field(self): handler.filter(record) result = json.loads(handler.format(record)) self.assertEqual(set(expected_payload.keys()), set(result.keys())) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" ) @@ -382,7 +382,7 @@ def test_format_with_arguments(self): result = handler.format(record) self.assertIn(expected_result, result) - def test_format_with_request(self): + def test_format_with_xctc_request(self): import logging import json @@ -393,8 +393,9 @@ def test_format_with_request(self): expected_path = "http://testserver/123" expected_agent = "Mozilla/5.0" expected_trace = "123" - expected_span = "456" - trace_header = f"{expected_trace}/{expected_span};o=1" + input_span = "456" + expected_span = "1c8".zfill(16) + trace_header = f"{expected_trace}/{input_span};o=1" expected_payload = { "logging.googleapis.com/trace": expected_trace, "logging.googleapis.com/spanId": expected_span, @@ -417,7 +418,7 @@ def test_format_with_request(self): ): handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_with_traceparent(self): @@ -452,14 +453,14 @@ def test_format_with_traceparent(self): ): handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): 
self.assertEqual(value, result[key]) def test_format_overrides(self): """ Allow users to override log fields using `logging.info("", extra={})` - If supported fields were overriden by the user, those choices should + If supported fields were overridden by the user, those choices should take precedence. """ import logging @@ -509,7 +510,108 @@ def test_format_overrides(self): ) handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): + self.assertEqual(value, result[key]) + + def test_format_with_opentelemetry_span(self): + import logging + import json + + from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ) + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_payload = { + "logging.googleapis.com/trace": _EXPECTED_OTEL_TRACE_ID, + "logging.googleapis.com/spanId": _EXPECTED_OTEL_SPAN_ID, + "logging.googleapis.com/trace_sampled": _EXPECTED_OTEL_TRACESAMPLED, + } + + with _setup_otel_span_context(): + handler.filter(record) + result = json.loads(handler.format(record)) + for key, value in expected_payload.items(): + self.assertEqual(value, result[key]) + + def test_format_with_opentelemetry_span_and_request(self): + import logging + import json + + from tests.unit.handlers import ( + _setup_otel_span_context, + _EXPECTED_OTEL_TRACE_ID, + _EXPECTED_OTEL_SPAN_ID, + _EXPECTED_OTEL_TRACESAMPLED, + ) + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + http_trace = "123" + http_span = "456" + trace_header = f"{http_trace}/{http_span};o=1" + expected_payload = { + 
"logging.googleapis.com/trace": _EXPECTED_OTEL_TRACE_ID, + "logging.googleapis.com/spanId": _EXPECTED_OTEL_SPAN_ID, + "logging.googleapis.com/trace_sampled": _EXPECTED_OTEL_TRACESAMPLED, + "httpRequest": { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + }, + } + + app = self.create_app() + with app.test_request_context( + expected_path, + headers={ + "User-Agent": expected_agent, + "X_CLOUD_TRACE_CONTEXT": trace_header, + }, + ): + with _setup_otel_span_context(): + handler.filter(record) + result = json.loads(handler.format(record)) + for key, value in expected_payload.items(): + self.assertEqual(value, result[key]) + + def test_format_with_opentelemetry_span_and_overrides(self): + import logging + import json + + from tests.unit.handlers import _setup_otel_span_context + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + overwrite_trace = "abc" + overwrite_span = "123" + overwrite_tracesampled = False + record.trace = overwrite_trace + record.span_id = overwrite_span + record.trace_sampled = overwrite_tracesampled + expected_payload = { + "logging.googleapis.com/trace": overwrite_trace, + "logging.googleapis.com/spanId": overwrite_span, + "logging.googleapis.com/trace_sampled": overwrite_tracesampled, + } + + with _setup_otel_span_context(): + handler.filter(record) + result = json.loads(handler.format(record)) + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_with_json_fields(self): @@ -590,7 +692,7 @@ def test_json_fields_input_unmodified(self): handler.format(record) # ensure json_fields has no side-effects self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) - for (key, value) in json_fields_orig.items(): + for key, value in json_fields_orig.items(): self.assertEqual( value, json_fields[key], 
f"expected_payload[{key}] != result[{key}]" ) diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index d4954ff7b..9fdccb172 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -12,13 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import contextlib import time import logging import queue +import re import unittest import mock +from io import StringIO + class TestBackgroundThreadHandler(unittest.TestCase): PROJECT = "PROJECT" @@ -176,6 +180,11 @@ def test_worker(self): class Test_Worker(unittest.TestCase): NAME = "python_logger" + def setUp(self): + import sys + + print("In method", self._testMethodName, file=sys.stderr) + @staticmethod def _get_target_class(): from google.cloud.logging_v2.handlers.transports import background_thread @@ -187,9 +196,26 @@ def _make_one(self, *args, **kw): def _start_with_thread_patch(self, worker): with mock.patch("threading.Thread", new=_Thread) as thread_mock: - with mock.patch("atexit.register") as atexit_mock: - worker.start() - return thread_mock, atexit_mock + worker.start() + return thread_mock + + @staticmethod + @contextlib.contextmanager + def _init_atexit_mock(): + atexit_mock = _AtexitMock() + with mock.patch.multiple( + "atexit", register=atexit_mock.register, unregister=atexit_mock.unregister + ): + yield atexit_mock + + @staticmethod + @contextlib.contextmanager + def _init_main_thread_is_alive_mock(is_alive): + with mock.patch("threading.main_thread") as main_thread_func_mock: + main_thread_obj_mock = mock.Mock() + main_thread_func_mock.return_value = main_thread_obj_mock + main_thread_obj_mock.is_alive = mock.Mock(return_value=is_alive) + yield def test_constructor(self): logger = _Logger(self.NAME) @@ -216,14 +242,15 @@ def test_start(self): worker = self._make_one(_Logger(self.NAME)) - _, 
atexit_mock = self._start_with_thread_patch(worker) + with self._init_atexit_mock() as atexit_mock: + self._start_with_thread_patch(worker) self.assertTrue(worker.is_alive) self.assertIsNotNone(worker._thread) self.assertTrue(worker._thread.daemon) self.assertEqual(worker._thread._target, worker._thread_main) self.assertEqual(worker._thread._name, background_thread._WORKER_THREAD_NAME) - atexit_mock.assert_called_once_with(worker._main_thread_terminated) + self.assertIn(worker._handle_exit, atexit_mock.registered_funcs) # Calling start again should not start a new thread. current_thread = worker._thread @@ -260,29 +287,33 @@ def test_stop_no_grace(self): self.assertEqual(thread._timeout, None) - def test__main_thread_terminated(self): + def test__close(self): worker = self._make_one(_Logger(self.NAME)) self._start_with_thread_patch(worker) - worker._main_thread_terminated() + worker._close("") self.assertFalse(worker.is_alive) # Calling twice should not be an error - worker._main_thread_terminated() + worker._close("") - def test__main_thread_terminated_non_empty_queue(self): + def test__close_non_empty_queue(self): worker = self._make_one(_Logger(self.NAME)) + msg = "My Message" self._start_with_thread_patch(worker) record = mock.Mock() record.created = time.time() worker.enqueue(record, "") - worker._main_thread_terminated() + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + worker._close(msg) + self.assertIn(msg, stderr_mock.getvalue()) self.assertFalse(worker.is_alive) - def test__main_thread_terminated_did_not_join(self): + def test__close_did_not_join(self): worker = self._make_one(_Logger(self.NAME)) self._start_with_thread_patch(worker) @@ -290,7 +321,65 @@ def test__main_thread_terminated_did_not_join(self): record = mock.Mock() record.created = time.time() worker.enqueue(record, "") - worker._main_thread_terminated() + worker._close("") + + self.assertFalse(worker.is_alive) + + def test__handle_exit(self): + from 
google.cloud.logging_v2.handlers.transports.background_thread import ( + _CLOSE_THREAD_SHUTDOWN_ERROR_MSG, + ) + + worker = self._make_one(_Logger(self.NAME)) + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + with self._init_main_thread_is_alive_mock(False): + with self._init_atexit_mock(): + self._start_with_thread_patch(worker) + self._enqueue_record(worker, "test") + worker._handle_exit() + + self.assertRegex( + stderr_mock.getvalue(), + re.compile("^%s$" % _CLOSE_THREAD_SHUTDOWN_ERROR_MSG, re.MULTILINE), + ) + + self.assertRegex( + stderr_mock.getvalue(), + re.compile( + r"^Failed to send %d pending logs\.$" % worker._queue.qsize(), + re.MULTILINE, + ), + ) + + def test__handle_exit_no_items(self): + worker = self._make_one(_Logger(self.NAME)) + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + with self._init_main_thread_is_alive_mock(False): + with self._init_atexit_mock(): + self._start_with_thread_patch(worker) + worker._handle_exit() + + self.assertEqual(stderr_mock.getvalue(), "") + + def test_close_unregister_atexit(self): + worker = self._make_one(_Logger(self.NAME)) + + with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock: + with self._init_atexit_mock() as atexit_mock: + self._start_with_thread_patch(worker) + self.assertIn(worker._handle_exit, atexit_mock.registered_funcs) + worker.close() + self.assertNotIn(worker._handle_exit, atexit_mock.registered_funcs) + + self.assertNotRegex( + stderr_mock.getvalue(), + re.compile( + r"^Failed to send %d pending logs\.$" % worker._queue.qsize(), + re.MULTILINE, + ), + ) self.assertFalse(worker.is_alive) @@ -402,6 +491,23 @@ def test__thread_main_batches(self): self.assertFalse(worker._cloud_logger._batch.commit_called) self.assertEqual(worker._queue.qsize(), 0) + def test__thread_main_main_thread_terminated(self): + from google.cloud.logging_v2.handlers.transports import background_thread + + worker = self._make_one(_Logger(self.NAME)) + 
self._enqueue_record(worker, "1") + worker._queue.put_nowait(background_thread._WORKER_TERMINATOR) + + with mock.patch("threading.main_thread") as main_thread_func_mock: + main_thread_obj_mock = mock.Mock() + main_thread_func_mock.return_value = main_thread_obj_mock + main_thread_obj_mock.is_alive = mock.Mock(return_value=False) + self._enqueue_record(worker, "1") + self._enqueue_record(worker, "2") + worker._thread_main() + + self.assertFalse(worker._cloud_logger._batch.commit_called) + @mock.patch("time.time", autospec=True, return_value=1) def test__thread_main_max_latency(self, time): # Note: this test is a bit brittle as it assumes the operation of @@ -565,3 +671,16 @@ def __init__(self, project, _http=None, credentials=None): def logger(self, name, resource=None): # pylint: disable=unused-argument self._logger = _Logger(name, resource=resource) return self._logger + + +class _AtexitMock(object): + """_AtexitMock is a simulation of registering/unregistering functions in atexit using a dummy set.""" + + def __init__(self): + self.registered_funcs = set() + + def register(self, func): + self.registered_funcs.add(func) + + def unregister(self, func): + self.registered_funcs.remove(func) diff --git a/tests/unit/handlers/transports/test_base.py b/tests/unit/handlers/transports/test_base.py index 71ef1366a..b723db87b 100644 --- a/tests/unit/handlers/transports/test_base.py +++ b/tests/unit/handlers/transports/test_base.py @@ -16,7 +16,6 @@ class TestBaseHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod @@ -39,3 +38,7 @@ def test_resource_is_valid_argunent(self): def test_flush_is_abstract_and_optional(self): target = self._make_one("client", "name") target.flush() + + def test_close_is_abstract_and_optional(self): + target = self._make_one("client", "name") + target.close() diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index 752a96d9f..01a949d24 100644 --- 
a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -17,7 +17,6 @@ class TestSyncHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index 8bf25870a..58e230129 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -17,6 +17,8 @@ import google.auth.credentials import mock +from datetime import datetime + import google.cloud.logging from google.cloud import logging_v2 from google.cloud.logging_v2 import _gapic @@ -173,6 +175,21 @@ def test_write_entries_single(self): assert request.entries[0].resource.type == entry["resource"]["type"] assert request.entries[0].text_payload == "text" + def test_write_entries_parse_error(self): + client = self.make_logging_api() + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.write_log_entries), "__call__" + ) as call: + entry = { + "logName": self.LOG_PATH, + "resource": {"type": "global"}, + "jsonPayload": {"time": datetime.now()}, + } + client.write_entries([entry]) + + call.assert_not_called() + def test_logger_delete(self): client = self.make_logging_api() @@ -595,7 +612,6 @@ def test_non_registry_failure(self, msg_to_dict_mock): msg_to_dict_mock.assert_called_once_with( entry_pb, preserving_proto_field_name=False, - including_default_value_fields=False, ) def test_unregistered_type(self): diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index f9b60cfa6..5709a50a6 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -24,7 +24,6 @@ def _make_credentials(): class TestConnection(unittest.TestCase): - PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" @@ -96,7 +95,6 @@ def test_extra_headers(self): class Test_LoggingAPI(unittest.TestCase): - PROJECT = "project" PROJECT_PATH = "projects/project" LIST_ENTRIES_PATH = "entries:list" @@ -124,9 +122,9 @@ def test_ctor(self): @staticmethod def 
_make_timestamp(): import datetime - from google.cloud._helpers import UTC + from datetime import timezone - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.datetime.now(timezone.utc) return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_with_limits(self): @@ -354,7 +352,6 @@ def test_logger_delete(self): class Test_SinksAPI(unittest.TestCase): - PROJECT = "project" PROJECT_PATH = "projects/project" FILTER = "logName:syslog AND severity>=ERROR" @@ -636,7 +633,6 @@ def test_sink_delete_hit(self): class Test_MetricsAPI(unittest.TestCase): - PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" LIST_METRICS_PATH = "projects/%s/metrics" % (PROJECT,) @@ -865,7 +861,6 @@ def test_metric_delete_hit(self): class _Connection(object): - _called_with = None _raise_conflict = False diff --git a/tests/unit/test__instrumentation.py b/tests/unit/test__instrumentation.py index dc330b0ca..97473ee61 100644 --- a/tests/unit/test__instrumentation.py +++ b/tests/unit/test__instrumentation.py @@ -17,7 +17,6 @@ class TestInstrumentation(unittest.TestCase): - TEST_NAME = "python" # LONG_NAME > 14 characters LONG_NAME = TEST_NAME + "789ABCDEF" @@ -26,7 +25,7 @@ class TestInstrumentation(unittest.TestCase): # LONG_VERSION > 16 characters LONG_VERSION = TEST_VERSION + "6789ABCDEF12" - def _get_diagonstic_value(self, entry, key): + def _get_diagnostic_value(self, entry, key): return entry.payload[i._DIAGNOSTIC_INFO_KEY][i._INSTRUMENTATION_SOURCE_KEY][-1][ key ] @@ -35,10 +34,10 @@ def test_default_diagnostic_info(self): entry = i._create_diagnostic_entry() self.assertEqual( i._PYTHON_LIBRARY_NAME, - self._get_diagonstic_value(entry, "name"), + self._get_diagnostic_value(entry, "name"), ) self.assertEqual( - i._LIBRARY_VERSION, self._get_diagonstic_value(entry, "version") + i._LIBRARY_VERSION, self._get_diagnostic_value(entry, "version") ) def test_custom_diagnostic_info(self): @@ -47,10 +46,10 @@ def test_custom_diagnostic_info(self): ) 
self.assertEqual( self.TEST_NAME, - self._get_diagonstic_value(entry, "name"), + self._get_diagnostic_value(entry, "name"), ) self.assertEqual( - self.TEST_VERSION, self._get_diagonstic_value(entry, "version") + self.TEST_VERSION, self._get_diagnostic_value(entry, "version") ) def test_truncate_long_values(self): @@ -61,8 +60,8 @@ def test_truncate_long_values(self): expected_name = self.LONG_NAME[: i._MAX_NAME_LENGTH] + "*" expected_version = self.LONG_VERSION[: i._MAX_VERSION_LENGTH] + "*" - self.assertEqual(expected_name, self._get_diagonstic_value(entry, "name")) - self.assertEqual(expected_version, self._get_diagonstic_value(entry, "version")) + self.assertEqual(expected_name, self._get_diagnostic_value(entry, "name")) + self.assertEqual(expected_version, self._get_diagnostic_value(entry, "version")) def test_drop_labels(self): """Labels should not be copied in instrumentation log""" diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 1c47a343b..6a9a7fd84 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -34,7 +34,6 @@ def _make_credentials(): class TestClient(unittest.TestCase): - PROJECT = "PROJECT" PROJECT_PATH = f"projects/{PROJECT}" LOGGER_NAME = "LOGGER_NAME" @@ -843,14 +842,12 @@ def test_setup_logging(self): (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) + self.assertIn(handler, client._handlers) handler.transport.worker.stop() expected_kwargs = { "excluded_loggers": ( - "google.cloud", - "google.auth", - "google_auth_httplib2", "google.api_core.bidi", "werkzeug", ), @@ -886,14 +883,12 @@ def test_setup_logging_w_extra_kwargs(self): self.assertEqual(handler.name, name) self.assertEqual(handler.resource, resource) self.assertEqual(handler.labels, labels) + self.assertIn(handler, client._handlers) handler.transport.worker.stop() expected_kwargs = { "excluded_loggers": ( - "google.cloud", - "google.auth", - "google_auth_httplib2", "google.api_core.bidi", "werkzeug", ), @@ -901,9 +896,206 
@@ def test_setup_logging_w_extra_kwargs(self): } self.assertEqual(kwargs, expected_kwargs) + def test_setup_logging_w_extra_kwargs_structured_log(self): + import io + from google.cloud.logging.handlers import StructuredLogHandler + from google.cloud.logging import Resource + from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE -class _Connection(object): + name = "test-logger" + resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, StructuredLogHandler) + + expected_kwargs = { + "excluded_loggers": ( + "google.api_core.bidi", + "werkzeug", + ), + "log_level": 20, + } + self.assertEqual(kwargs, expected_kwargs) + self.assertIn(handler, client._handlers) + + def test_flush_handlers_cloud_logging_handler(self): + import io + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource + + name = "test-logger" + resource = Resource("resource_type", {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, CloudLoggingHandler) + + 
handler.flush = mock.Mock() + client.flush_handlers() + handler.flush.assert_called_once_with() + + def test_flush_handlers_cloud_logging_handler_no_setup_logging(self): + from google.cloud.logging.handlers import CloudLoggingHandler + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + handler = CloudLoggingHandler(client) + self.assertIn(handler, client._handlers) + + handler.flush = mock.Mock() + client.flush_handlers() + handler.flush.assert_called_once_with() + + def test_flush_handlers_structured_log(self): + import io + from google.cloud.logging.handlers import StructuredLogHandler + from google.cloud.logging import Resource + from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE + + name = "test-logger" + resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, StructuredLogHandler) + + handler.flush = mock.Mock() + client.flush_handlers() + handler.flush.assert_called_once_with() + + def test_close_cloud_logging_handler(self): + import contextlib + import io + from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging import Resource + + name = "test-logger" + resource = Resource("resource_type", {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with 
mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, CloudLoggingHandler) + + handler.close = mock.Mock() + with contextlib.closing(client): + pass + handler.close.assert_called_once_with() + + def test_close_cloud_logging_handler_no_setup_logging(self): + import contextlib + from google.cloud.logging.handlers import CloudLoggingHandler + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + handler = CloudLoggingHandler(client) + self.assertIn(handler, client._handlers) + + handler.close = mock.Mock() + with contextlib.closing(client): + pass + + handler.close.assert_called_once_with() + + def test_close_structured_log_handler(self): + import contextlib + import io + from google.cloud.logging.handlers import StructuredLogHandler + from google.cloud.logging import Resource + from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE + + name = "test-logger" + resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"}) + labels = {"handler_label": "value"} + stream = io.BytesIO() + + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, _use_grpc=False + ) + + with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked: + client.setup_logging( + name=name, resource=resource, labels=labels, stream=stream + ) + + self.assertEqual(len(mocked.mock_calls), 1) + _, args, kwargs = mocked.mock_calls[0] + + (handler,) = args + self.assertIsInstance(handler, StructuredLogHandler) + + handler.close = mock.Mock() + with contextlib.closing(client): + pass + + handler.close.assert_called_once_with() + + +class _Connection(object): _called_with = None def __init__(self, 
*responses): diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index 6f3af684f..382674ebd 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -79,7 +79,6 @@ def test_w_str(self): class TestLogEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -201,14 +200,14 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime - from google.cloud._helpers import UTC + from datetime import timezone from google.cloud.logging import Resource klass = self._get_target_class() client = _Client(self.PROJECT) SEVERITY = "CRITICAL" IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) LABELS = {"foo": "bar", "baz": "qux"} @@ -284,11 +283,11 @@ def test_from_api_repr_w_loggers_no_logger_match(self): def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime from datetime import timedelta - from google.cloud._helpers import UTC + from datetime import timezone client = _Client(self.PROJECT) IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) @@ -342,11 +341,11 @@ def test_from_api_repr_w_loggers_w_logger_match(self): def test_from_api_repr_w_folder_path(self): from datetime import datetime from datetime import timedelta - from google.cloud._helpers import UTC + from datetime import timezone client = _Client(self.PROJECT) IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) @@ -469,7 +468,6 @@ def test_to_api_repr_explicit(self): 
class TestTextEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -557,7 +555,6 @@ def test_to_api_repr_explicit(self): class TestStructEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -659,7 +656,6 @@ def test_to_api_repr_explicit(self): class TestProtobufEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -743,6 +739,45 @@ def test_to_api_repr_proto_defaults(self): } self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_proto_inner_struct_field(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = "test.log" + inner_struct = Struct(fields={"foo": Value(string_value="bar")}) + message = Struct(fields={"inner": Value(struct_value=inner_struct)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + + def test_to_api_repr_proto_inner_list_field(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import ListValue + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = "test.log" + lines = ListValue( + values=[Value(string_value="line1"), Value(string_value="line2")] + ) + message = Struct(fields={"lines": Value(list_value=lines)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_proto_explicit(self): import datetime from 
google.protobuf.json_format import MessageToDict diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 16c89959b..cdb56747d 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -28,7 +28,6 @@ def _make_credentials(): class TestLogger(unittest.TestCase): - PROJECT = "test-project" LOGGER_NAME = "logger-name" TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"' @@ -1086,7 +1085,6 @@ def test_first_log_emits_instrumentation(self): class TestBatch(unittest.TestCase): - PROJECT = "test-project" @staticmethod @@ -1847,7 +1845,6 @@ def test_batch_error_gets_context(self): class _Logger(object): - labels = None def __init__(self, name="NAME", project="PROJECT"): @@ -1855,7 +1852,6 @@ def __init__(self, name="NAME", project="PROJECT"): class _DummyLoggingAPI(object): - _write_entries_called_with = None def write_entries( @@ -1909,7 +1905,6 @@ class _Bugout(Exception): class _Connection(object): - _called_with = None def __init__(self, *responses): diff --git a/tests/unit/test_metric.py b/tests/unit/test_metric.py index 83b49d02d..f36ae3b2a 100644 --- a/tests/unit/test_metric.py +++ b/tests/unit/test_metric.py @@ -16,7 +16,6 @@ class TestMetric(unittest.TestCase): - PROJECT = "test-project" METRIC_NAME = "metric-name" FULL_METRIC_NAME = f"projects/{PROJECT}/metrics/{METRIC_NAME}" diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py new file mode 100644 index 000000000..4369ca2c1 --- /dev/null +++ b/tests/unit/test_packaging.py @@ -0,0 +1,56 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-logging``. + + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + + google_otherpkg = tmp_path / "google" / "otherpkg" + google_otherpkg.mkdir() + google_otherpkg.joinpath("__init__.py").write_text("") + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-logging``. + google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + + google_cloud_otherpkg = tmp_path / "google" / "cloud" / "otherpkg" + google_cloud_otherpkg.mkdir() + google_cloud_otherpkg.joinpath("__init__.py").write_text("") + + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + + for pkg in [ + "google.othermod", + "google.cloud.othermod", + "google.otherpkg", + "google.cloud.otherpkg", + "google.cloud.logging", + ]: + cmd = [sys.executable, "-c", f"import {pkg}"] + subprocess.check_output(cmd, env=env) + + for module in ["google.othermod", "google.cloud.othermod"]: + cmd = [sys.executable, "-m", module] + subprocess.check_output(cmd, env=env) diff --git a/tests/unit/test_sink.py b/tests/unit/test_sink.py index 1e4852ab5..b5005b057 100644 --- a/tests/unit/test_sink.py +++ b/tests/unit/test_sink.py @@ -16,7 +16,6 @@ class TestSink(unittest.TestCase): - PROJECT = "test-project" PROJECT_PATH = f"projects/{PROJECT}" SINK_NAME = "sink-name" pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy