diff --git a/.github/workflows/integration-tests-against-emulator-with-multiplexed-session.yaml b/.github/workflows/integration-tests-against-emulator-with-multiplexed-session.yaml new file mode 100644 index 0000000000..4714d8ee40 --- /dev/null +++ b/.github/workflows/integration-tests-against-emulator-with-multiplexed-session.yaml @@ -0,0 +1,34 @@ +on: + push: + branches: + - main + pull_request: +name: Run Spanner integration tests against emulator with multiplexed sessions +jobs: + system-tests: + runs-on: ubuntu-latest + + services: + emulator: + image: gcr.io/cloud-spanner-emulator/emulator:latest + ports: + - 9010:9010 + - 9020:9020 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: 3.8 + - name: Install nox + run: python -m pip install nox + - name: Run system tests + run: nox -s system + env: + SPANNER_EMULATOR_HOST: localhost:9010 + GOOGLE_CLOUD_PROJECT: emulator-test-project + GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE: true + GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS: true + GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_PARTITIONED_OPS: true diff --git a/.github/workflows/mock_server_tests.yaml b/.github/workflows/mock_server_tests.yaml index 2da5320071..e93ac9905c 100644 --- a/.github/workflows/mock_server_tests.yaml +++ b/.github/workflows/mock_server_tests.yaml @@ -5,7 +5,7 @@ on: pull_request: name: Run Spanner tests against an in-mem mock server jobs: - system-tests: + mock-server-tests: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/presubmit.yaml b/.github/workflows/presubmit.yaml new file mode 100644 index 0000000000..2d6132bd97 --- /dev/null +++ b/.github/workflows/presubmit.yaml @@ -0,0 +1,42 @@ +on: + push: + branches: + - main + pull_request: +name: Presubmit checks +permissions: + contents: read + pull-requests: write +jobs: + lint: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: 3.8 + - name: Install nox + run: python -m pip install nox + - name: Check formatting + run: nox -s lint + units: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{matrix.python}} + - name: Install nox + run: python -m pip install nox + - name: Run unit tests + run: nox -s unit-${{matrix.python}} diff --git a/.kokoro/presubmit/integration-multiplexed-sessions-enabled.cfg b/.kokoro/presubmit/integration-multiplexed-sessions-enabled.cfg new file mode 100644 index 0000000000..77ed7f9bab --- /dev/null +++ b/.kokoro/presubmit/integration-multiplexed-sessions-enabled.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run a subset of all nox sessions +env_vars: { + key: "NOX_SESSION" + value: "unit-3.8 unit-3.12 system-3.8" +} + +env_vars: { + key: "GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS" + value: "true" +} + +env_vars: { + key: "GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_PARTITIONED_OPS" + value: "true" +} \ No newline at end of file diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg index b158096f0a..14db9152d9 100644 --- a/.kokoro/presubmit/presubmit.cfg +++ b/.kokoro/presubmit/presubmit.cfg @@ -1,7 +1,7 @@ # Format: //devtools/kokoro/config/proto/build.proto -# Disable system tests. 
+# Only run a subset of all nox sessions env_vars: { - key: "RUN_SYSTEM_TESTS" - value: "false" + key: "NOX_SESSION" + value: "unit-3.8 unit-3.12 cover docs docfx" } diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 62c031f3f8..37e12350e3 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.54.0" + ".": "3.55.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index ee56542822..d7f8ac42c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,40 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## [3.55.0](https://github.com/googleapis/python-spanner/compare/v3.54.0...v3.55.0) (2025-05-28) + + +### Features + +* Add a `last` field in the `PartialResultSet` ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* Add support for multiplexed sessions ([#1381](https://github.com/googleapis/python-spanner/issues/1381)) ([97d7268](https://github.com/googleapis/python-spanner/commit/97d7268ac12a57d9d116ee3d9475580e1e7e07ae)) +* Add throughput_mode to UpdateDatabaseDdlRequest to be used by Spanner Migration Tool. See https://github.com/GoogleCloudPlatform/spanner-migration-tool ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* Support fine-grained permissions database roles in connect ([#1338](https://github.com/googleapis/python-spanner/issues/1338)) ([064d9dc](https://github.com/googleapis/python-spanner/commit/064d9dc3441a617cbc80af6e16493bc42c89b3c9)) + + +### Bug Fixes + +* E2E tracing metadata append issue ([#1357](https://github.com/googleapis/python-spanner/issues/1357)) ([3943885](https://github.com/googleapis/python-spanner/commit/394388595a312f60b423dfbfd7aaf2724cc4454f)) +* Pass through kwargs in dbapi connect ([#1368](https://github.com/googleapis/python-spanner/issues/1368)) ([aae8d61](https://github.com/googleapis/python-spanner/commit/aae8d6161580c88354d813fe75a297c318f1c2c7)) +* Remove setup.cfg configuration for creating universal wheels ([#1324](https://github.com/googleapis/python-spanner/issues/1324)) ([e064474](https://github.com/googleapis/python-spanner/commit/e0644744d7f3fcea42b461996fc0ee22d4218599)) + + +### Documentation + +* A comment for field `chunked_value` in message `.google.spanner.v1.PartialResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* A comment for field `precommit_token` in message `.google.spanner.v1.PartialResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* A comment for field `precommit_token` in message `.google.spanner.v1.ResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* A comment for field `query_plan` in message `.google.spanner.v1.ResultSetStats` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* A comment for field `row_count_lower_bound` in message `.google.spanner.v1.ResultSetStats` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* A comment for field `row_type` in message `.google.spanner.v1.ResultSetMetadata` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe)) +* A comment for field `rows` in message 
`.google.spanner.v1.ResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe))
+* A comment for field `stats` in message `.google.spanner.v1.PartialResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe))
+* A comment for field `stats` in message `.google.spanner.v1.ResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe))
+* A comment for field `values` in message `.google.spanner.v1.PartialResultSet` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe))
+* A comment for message `ResultSetMetadata` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe))
+* A comment for message `ResultSetStats` is changed ([d532d57](https://github.com/googleapis/python-spanner/commit/d532d57fd5908ecd7bc9dfff73695715cc4b1ebe))
+* Fix markdown formatting in transactions page ([#1377](https://github.com/googleapis/python-spanner/issues/1377)) ([de322f8](https://github.com/googleapis/python-spanner/commit/de322f89642a3c13b6b1d4b9b1a2cdf4c8f550fb))
+
 ## [3.54.0](https://github.com/googleapis/python-spanner/compare/v3.53.0...v3.54.0) (2025-04-28)
diff --git a/README.rst b/README.rst
index 7e75685f2e..085587e51d 100644
--- a/README.rst
+++ b/README.rst
@@ -252,6 +252,13 @@ Connection API represents a wrap-around for Python Spanner API, written in accor
     result = cursor.fetchall()
 
 
+If using `fine-grained access controls <https://cloud.google.com/spanner/docs/access-with-fgac>`_, you can pass a ``database_role`` argument to connect as that role:
+
+.. code:: python
+
+    connection = connect("instance-id", "database-id", database_role='your-role')
+
+
 Aborted Transactions Retry Mechanism
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/docs/transaction-usage.rst b/docs/transaction-usage.rst
index 4781cfa148..78026bf5a4 100644
--- a/docs/transaction-usage.rst
+++ b/docs/transaction-usage.rst
@@ -5,7 +5,8 @@ A :class:`~google.cloud.spanner_v1.transaction.Transaction` represents a
 transaction: when the transaction commits, it will send any accumulated
 mutations to the server.
 
-To understand more about how transactions work, visit [Transaction](https://cloud.google.com/spanner/docs/reference/rest/v1/Transaction).
+To understand more about how transactions work, visit
+`Transaction <https://cloud.google.com/spanner/docs/reference/rest/v1/Transaction>`_.
 To learn more about how to use them in the Python client, continue reading.
 
 
@@ -90,8 +91,8 @@ any of the records already exists.
 Update records using a Transaction
 ----------------------------------
 
-:meth:`Transaction.update` updates one or more existing records in a table. Fails
-if any of the records does not already exist.
+:meth:`Transaction.update` updates one or more existing records in a table.
+Fails if any of the records does not already exist.
 
 .. code:: python
 
@@ -178,9 +179,9 @@ Using :meth:`~Database.run_in_transaction`
 
 Rather than calling :meth:`~Transaction.commit` or :meth:`~Transaction.rollback`
 manually, you should use :meth:`~Database.run_in_transaction` to run the
-function that you need.  The transaction's :meth:`~Transaction.commit` method
+function that you need. The transaction's :meth:`~Transaction.commit` method
 will be called automatically if the ``with`` block exits without raising an
-exception. 
The function will automatically be retried for :class:`~google.api_core.exceptions.Aborted` errors, but will raise on :class:`~google.api_core.exceptions.GoogleAPICallError` and :meth:`~Transaction.rollback` will be called on all others. @@ -188,25 +189,30 @@ exception. The function will automatically be retried for .. code:: python def _unit_of_work(transaction): - transaction.insert( - 'citizens', columns=['email', 'first_name', 'last_name', 'age'], + 'citizens', + columns=['email', 'first_name', 'last_name', 'age'], values=[ ['phred@exammple.com', 'Phred', 'Phlyntstone', 32], ['bharney@example.com', 'Bharney', 'Rhubble', 31], - ]) + ] + ) transaction.update( - 'citizens', columns=['email', 'age'], + 'citizens', + columns=['email', 'age'], values=[ ['phred@exammple.com', 33], ['bharney@example.com', 32], - ]) + ] + ) ... - transaction.delete('citizens', - keyset['bharney@example.com', 'nonesuch@example.com']) + transaction.delete( + 'citizens', + keyset=['bharney@example.com', 'nonesuch@example.com'] + ) db.run_in_transaction(_unit_of_work) @@ -242,7 +248,7 @@ If an exception is raised inside the ``with`` block, the transaction's ... transaction.delete('citizens', - keyset['bharney@example.com', 'nonesuch@example.com']) + keyset=['bharney@example.com', 'nonesuch@example.com']) Begin a Transaction diff --git a/google/cloud/spanner_admin_database_v1/__init__.py b/google/cloud/spanner_admin_database_v1/__init__.py index 3d6ac19f3c..674f0de7a2 100644 --- a/google/cloud/spanner_admin_database_v1/__init__.py +++ b/google/cloud/spanner_admin_database_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/gapic_version.py b/google/cloud/spanner_admin_database_v1/gapic_version.py index 9f7e08d550..b7c2622867 100644 --- a/google/cloud/spanner_admin_database_v1/gapic_version.py +++ b/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.54.0" # {x-release-please-version} +__version__ = "3.55.0" # {x-release-please-version} diff --git a/google/cloud/spanner_admin_database_v1/services/__init__.py b/google/cloud/spanner_admin_database_v1/services/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/google/cloud/spanner_admin_database_v1/services/__init__.py +++ b/google/cloud/spanner_admin_database_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py index cae7306643..580a7ed2a2 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
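Reviewer note on the ``docs/transaction-usage.rst`` hunks above: the examples pass a plain list as ``keyset``, but in the published client library ``Transaction.delete`` expects a ``KeySet`` object, so the snippet will not run as written. A minimal runnable sketch of the same unit of work, with placeholder instance and database IDs:

```python
# Sketch of the delete-inside-run_in_transaction pattern from the docs above.
# "instance-id" and "database-id" are placeholders; KeySet reflects the public
# google-cloud-spanner API (Transaction.delete takes a KeySet, not a list).
from google.cloud import spanner

client = spanner.Client()
database = client.instance("instance-id").database("database-id")


def _unit_of_work(transaction):
    # Each key is itself a list: one value per primary-key column.
    keyset = spanner.KeySet(
        keys=[["bharney@example.com"], ["nonesuch@example.com"]]
    )
    transaction.delete("citizens", keyset=keyset)


database.run_in_transaction(_unit_of_work)
```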
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index 584cd6711e..05b090d5a0 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -405,7 +406,10 @@ async def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -557,7 +561,10 @@ async def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, create_statement]) + flattened_params = [parent, create_statement] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -676,7 +683,10 @@ async def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -842,7 +852,10 @@ async def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1010,7 +1023,10 @@ async def sample_update_database_ddl(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database, statements]) + flattened_params = [database, statements] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1122,7 +1138,10 @@ async def sample_drop_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1233,7 +1252,10 @@ async def sample_get_database_ddl(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1379,7 +1401,10 @@ async def sample_set_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1523,7 +1548,10 @@ async def sample_get_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1648,7 +1676,10 @@ async def sample_test_iam_permissions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) + flattened_params = [resource, permissions] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1796,7 +1827,10 @@ async def sample_create_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, backup, backup_id]) + flattened_params = [parent, backup, backup_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1973,7 +2007,10 @@ async def sample_copy_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_id, source_backup, expire_time]) + flattened_params = [parent, backup_id, source_backup, expire_time] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2094,7 +2131,10 @@ async def sample_get_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2217,7 +2257,10 @@ async def sample_update_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup, update_mask]) + flattened_params = [backup, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2322,7 +2365,10 @@ async def sample_delete_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2433,7 +2479,10 @@ async def sample_list_backups(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2604,7 +2653,10 @@ async def sample_restore_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, database_id, backup]) + flattened_params = [parent, database_id, backup] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2740,7 +2792,10 @@ async def sample_list_database_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2877,7 +2932,10 @@ async def sample_list_backup_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3003,7 +3061,10 @@ async def sample_list_database_roles(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3134,7 +3195,10 @@ async def sample_add_split_points(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, split_points]) + flattened_params = [database, split_points] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3266,7 +3330,10 @@ async def sample_create_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_schedule, backup_schedule_id]) + flattened_params = [parent, backup_schedule, backup_schedule_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3380,7 +3447,10 @@ async def sample_get_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3506,7 +3576,10 @@ async def sample_update_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_schedule, update_mask]) + flattened_params = [backup_schedule, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3612,7 +3685,10 @@ async def sample_delete_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3725,7 +3801,10 @@ async def sample_list_backup_schedules(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4011,5 +4090,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("DatabaseAdminAsyncClient",) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py index 1eced63261..7fc4313641 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -45,6 +45,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -962,7 +963,10 @@ def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1111,7 +1115,10 @@ def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, create_statement]) + flattened_params = [parent, create_statement] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1227,7 +1234,10 @@ def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1390,7 +1400,10 @@ def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1555,7 +1568,10 @@ def sample_update_database_ddl(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, statements]) + flattened_params = [database, statements] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1664,7 +1680,10 @@ def sample_drop_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1772,7 +1791,10 @@ def sample_get_database_ddl(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1915,7 +1937,10 @@ def sample_set_iam_policy(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2060,7 +2085,10 @@ def sample_get_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2186,7 +2214,10 @@ def sample_test_iam_permissions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) + flattened_params = [resource, permissions] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2335,7 +2366,10 @@ def sample_create_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup, backup_id]) + flattened_params = [parent, backup, backup_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2509,7 +2543,10 @@ def sample_copy_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_id, source_backup, expire_time]) + flattened_params = [parent, backup_id, source_backup, expire_time] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2627,7 +2664,10 @@ def sample_get_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2747,7 +2787,10 @@ def sample_update_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([backup, update_mask]) + flattened_params = [backup, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2849,7 +2892,10 @@ def sample_delete_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2957,7 +3003,10 @@ def sample_list_backups(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3125,7 +3174,10 @@ def sample_restore_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database_id, backup]) + flattened_params = [parent, database_id, backup] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3258,7 +3310,10 @@ def sample_list_database_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3392,7 +3447,10 @@ def sample_list_backup_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3515,7 +3573,10 @@ def sample_list_database_roles(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3643,7 +3704,10 @@ def sample_add_split_points(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, split_points]) + flattened_params = [database, split_points] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3772,7 +3836,10 @@ def sample_create_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_schedule, backup_schedule_id]) + flattened_params = [parent, backup_schedule, backup_schedule_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3883,7 +3950,10 @@ def sample_get_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4006,7 +4076,10 @@ def sample_update_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_schedule, update_mask]) + flattened_params = [backup_schedule, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4109,7 +4182,10 @@ def sample_delete_backup_schedule(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4219,7 +4295,10 @@ def sample_list_backup_schedules(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4517,5 +4596,7 @@ def cancel_operation( gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("DatabaseAdminClient",) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py index fe760684db..c9e2e14d52 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py index a20c366a95..23ba04ea21 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py index e0c3e7c1d9..c53cc16026 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
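The ``has_flattened_params`` rewrite repeated through ``async_client.py`` and ``client.py`` above (and in the instance-admin clients further below) swaps a truthiness test for an explicit ``None`` test. A standalone sketch of the behavioral difference, using hypothetical ``old_check``/``new_check`` helpers:

```python
# Plain-Python illustration of the check being changed in every client method
# above: any([...]) uses truthiness, so a falsy-but-explicit value such as ""
# was treated as "not provided"; the new form only treats None that way.
def old_check(flattened_params):
    return any(flattened_params)


def new_check(flattened_params):
    return len([p for p in flattened_params if p is not None]) > 0


parent = ""  # explicitly passed by the caller, but falsy
assert old_check([parent]) is False  # old: silently ignored
assert new_check([parent]) is True   # new: counts as a flattened param
```

With the new form, passing both a ``request`` object and an explicit empty-string ``parent`` now raises the intended ``ValueError`` instead of slipping through.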
@@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup @@ -43,6 +44,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class DatabaseAdminTransport(abc.ABC): """Abstract transport class for DatabaseAdmin.""" diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py index 00d7e84672..de999d6a71 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -81,12 +81,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py index 624bc2d25b..b8ea344fbd 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py index 30adfa8b07..efdeb5628a 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -69,6 +70,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class DatabaseAdminRestInterceptor: """Interceptor for DatabaseAdmin. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py index b55ca50b62..107024f245 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/types/__init__.py b/google/cloud/spanner_admin_database_v1/types/__init__.py index 70db52cd35..e6fde68af0 100644 --- a/google/cloud/spanner_admin_database_v1/types/__init__.py +++ b/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/types/backup.py b/google/cloud/spanner_admin_database_v1/types/backup.py index acec22244f..15e1e2836c 100644 --- a/google/cloud/spanner_admin_database_v1/types/backup.py +++ b/google/cloud/spanner_admin_database_v1/types/backup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/google/cloud/spanner_admin_database_v1/types/backup_schedule.py index 9637480731..130c6879a3 100644 --- a/google/cloud/spanner_admin_database_v1/types/backup_schedule.py +++ b/google/cloud/spanner_admin_database_v1/types/backup_schedule.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_database_v1/types/common.py b/google/cloud/spanner_admin_database_v1/types/common.py index 9dd3ff8bb6..3b78c4b153 100644 --- a/google/cloud/spanner_admin_database_v1/types/common.py +++ b/google/cloud/spanner_admin_database_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
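The ``DEFAULT_CLIENT_INFO`` additions in ``base.py`` and ``rest.py`` above stamp the installed protobuf runtime version onto the client metadata, and the ``hasattr`` guard keeps the code working against older ``google-api-core`` releases whose ``ClientInfo`` lacks that field. A minimal sketch of the same pattern:

```python
# Guarded version-stamping, mirroring the generated code above. ClientInfo is
# the real google.api_core type; whether it exposes protobuf_runtime_version
# depends on the installed google-api-core release, hence the hasattr() guard.
from google.api_core.gapic_v1.client_info import ClientInfo
import google.protobuf

client_info = ClientInfo(gapic_version="3.55.0")
if hasattr(client_info, "protobuf_runtime_version"):  # absent on old releases
    client_info.protobuf_runtime_version = google.protobuf.__version__
```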
diff --git a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py
index 3a9c0d8edd..8ba9c6cf11 100644
--- a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py
+++ b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -570,6 +570,10 @@ class UpdateDatabaseDdlRequest(proto.Message):
             For more details, see protobuffer `self description <https://developers.google.com/protocol-buffers/docs/techniques#self-description>`__.
+        throughput_mode (bool):
+            Optional. This field is exposed to be used by the Spanner
+            Migration Tool. For more details, see
+            `SMT <https://github.com/GoogleCloudPlatform/spanner-migration-tool>`__.
     """
 
     database: str = proto.Field(
@@ -588,6 +592,10 @@
         proto.BYTES,
         number=4,
     )
+    throughput_mode: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
 
 
 class DdlStatementActionInfo(proto.Message):
diff --git a/google/cloud/spanner_admin_instance_v1/__init__.py b/google/cloud/spanner_admin_instance_v1/__init__.py
index f5b8d7277f..5368b59895 100644
--- a/google/cloud/spanner_admin_instance_v1/__init__.py
+++ b/google/cloud/spanner_admin_instance_v1/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/google/cloud/spanner_admin_instance_v1/gapic_version.py b/google/cloud/spanner_admin_instance_v1/gapic_version.py
index 9f7e08d550..b7c2622867 100644
--- a/google/cloud/spanner_admin_instance_v1/gapic_version.py
+++ b/google/cloud/spanner_admin_instance_v1/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "3.54.0"  # {x-release-please-version}
+__version__ = "3.55.0"  # {x-release-please-version}
diff --git a/google/cloud/spanner_admin_instance_v1/services/__init__.py b/google/cloud/spanner_admin_instance_v1/services/__init__.py
index 8f6cf06824..cbf94b283c 100644
--- a/google/cloud/spanner_admin_instance_v1/services/__init__.py
+++ b/google/cloud/spanner_admin_instance_v1/services/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py
index aab66a65b0..51df22ca2e 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
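For context on the new ``throughput_mode`` field above: it is an optional knob on ``UpdateDatabaseDdlRequest`` intended for the Spanner Migration Tool rather than general use. An illustrative construction only, with a placeholder database path and DDL:

```python
# Illustrative only: building an UpdateDatabaseDdlRequest that sets the new
# field added in this release. The database path and DDL are placeholders,
# and throughput_mode is documented as being for the Spanner Migration Tool.
from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlRequest

request = UpdateDatabaseDdlRequest(
    database="projects/my-project/instances/my-instance/databases/my-database",
    statements=["ALTER TABLE citizens ADD COLUMN nickname STRING(MAX)"],
    throughput_mode=True,  # proto field number 5, new in 3.55.0
)
# The request would then go to DatabaseAdminClient.update_database_ddl(request=request).
```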
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index 33e93d9b90..49de66d0c3 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -399,7 +400,10 @@ async def sample_list_instance_configs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -524,7 +528,10 @@ async def sample_get_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -706,7 +713,10 @@ async def sample_create_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_config, instance_config_id]) + flattened_params = [parent, instance_config, instance_config_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -900,7 +910,10 @@ async def sample_update_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance_config, update_mask]) + flattened_params = [instance_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1022,7 +1035,10 @@ async def sample_delete_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1144,7 +1160,10 @@ async def sample_list_instance_config_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1274,7 +1293,10 @@ async def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1403,7 +1425,10 @@ async def sample_list_instance_partitions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1527,7 +1552,10 @@ async def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1705,7 +1733,10 @@ async def sample_create_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, instance]) + flattened_params = [parent, instance_id, instance] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1898,7 +1929,10 @@ async def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([instance, field_mask]) + flattened_params = [instance, field_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2022,7 +2056,10 @@ async def sample_delete_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2161,7 +2198,10 @@ async def sample_set_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2301,7 +2341,10 @@ async def sample_get_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2423,7 +2466,10 @@ async def sample_test_iam_permissions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) + flattened_params = [resource, permissions] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2535,7 +2581,10 @@ async def sample_get_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2717,7 +2766,10 @@ async def sample_create_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, instance_partition, instance_partition_id]) + flattened_params = [parent, instance_partition, instance_partition_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2839,7 +2891,10 @@ async def sample_delete_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3021,7 +3076,10 @@ async def sample_update_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance_partition, field_mask]) + flattened_params = [instance_partition, field_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3164,7 +3222,10 @@ async def sample_list_instance_partition_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3398,5 +3459,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("InstanceAdminAsyncClient",) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py index 11c880416b..51d7482520 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -45,6 +45,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -848,7 +849,10 @@ def sample_list_instance_configs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -970,7 +974,10 @@ def sample_get_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1149,7 +1156,10 @@ def sample_create_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_config, instance_config_id]) + flattened_params = [parent, instance_config, instance_config_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1340,7 +1350,10 @@ def sample_update_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance_config, update_mask]) + flattened_params = [instance_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1459,7 +1472,10 @@ def sample_delete_instance_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1578,7 +1594,10 @@ def sample_list_instance_config_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1707,7 +1726,10 @@ def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1833,7 +1855,10 @@ def sample_list_instance_partitions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1954,7 +1979,10 @@ def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2129,7 +2157,10 @@ def sample_create_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, instance]) + flattened_params = [parent, instance_id, instance] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2319,7 +2350,10 @@ def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, field_mask]) + flattened_params = [instance, field_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2440,7 +2474,10 @@ def sample_delete_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2576,7 +2613,10 @@ def sample_set_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2717,7 +2757,10 @@ def sample_get_iam_policy(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2840,7 +2883,10 @@ def sample_test_iam_permissions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) + flattened_params = [resource, permissions] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2953,7 +2999,10 @@ def sample_get_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3132,7 +3181,10 @@ def sample_create_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_partition, instance_partition_id]) + flattened_params = [parent, instance_partition, instance_partition_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3253,7 +3305,10 @@ def sample_delete_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3434,7 +3489,10 @@ def sample_update_instance_partition(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance_partition, field_mask]) + flattened_params = [instance_partition, field_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3576,7 +3634,10 @@ def sample_list_instance_partition_operations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3814,5 +3875,7 @@ def __exit__(self, type, value, traceback): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("InstanceAdminClient",) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py index 7bbdee1e7a..d4a3dde6d8 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py index b25510676e..24e71739c7 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py index 5f7711559c..3bcd32e6af 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
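Every `has_flattened_params` hunk in these generated clients makes the same semantic change, which is easy to miss in the repetition: `any([param])` treats falsy-but-explicit values (an empty string, `0`, an empty list) as if no flattened argument had been passed, while the new check treats only `None` as absent. A standalone sketch of the difference (the helper names are illustrative, not the generated client's):

def old_has_flattened_params(*flattened_params):
    # Falsy values such as "" or 0 were indistinguishable from "not passed".
    return any(flattened_params)

def new_has_flattened_params(*flattened_params):
    # Only None counts as "not passed"; any explicit value trips the guard.
    return len([param for param in flattened_params if param is not None]) > 0

assert old_has_flattened_params("") is False
assert new_has_flattened_params("") is True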
@@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -37,6 +38,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class InstanceAdminTransport(abc.ABC): """Abstract transport class for InstanceAdmin.""" diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index e31c5c48b7..16ca5cc338 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -75,12 +75,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index 2b382a0085..b28b9d1ed4 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py index a728491812..571e303bfc 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -63,6 +64,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class InstanceAdminRestInterceptor: """Interceptor for InstanceAdmin. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py index 546f0b8ae3..906fb7b224 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_instance_v1/types/__init__.py b/google/cloud/spanner_admin_instance_v1/types/__init__.py index 38ba52abc3..9bd2de3e47 100644 --- a/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_instance_v1/types/common.py b/google/cloud/spanner_admin_instance_v1/types/common.py index e7f6885c99..548e61c38e 100644 --- a/google/cloud/spanner_admin_instance_v1/types/common.py +++ b/google/cloud/spanner_admin_instance_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py index 01a6584f68..44dc52ddc4 100644 --- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
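The repeated `hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version")` guard added across the transports is feature detection: older `google-api-core` releases ship a `ClientInfo` without that attribute, so the protobuf runtime version is only reported when the installed version supports it. A self-contained sketch of the pattern, with a hypothetical `ClientInfo` stand-in:

import google.protobuf

class ClientInfo:
    # Stand-in for google.api_core.gapic_v1.client_info.ClientInfo; newer
    # api-core releases define this attribute, older ones do not.
    protobuf_runtime_version = None

DEFAULT_CLIENT_INFO = ClientInfo()

# Populate the field only when the installed api-core knows about it.
if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):
    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__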
diff --git a/google/cloud/spanner_dbapi/connection.py b/google/cloud/spanner_dbapi/connection.py index a615a282b5..6a21769f13 100644 --- a/google/cloud/spanner_dbapi/connection.py +++ b/google/cloud/spanner_dbapi/connection.py @@ -17,6 +17,8 @@ from google.api_core.exceptions import Aborted from google.api_core.gapic_v1.client_info import ClientInfo +from google.auth.credentials import AnonymousCredentials + from google.cloud import spanner_v1 as spanner from google.cloud.spanner_dbapi import partition_helper from google.cloud.spanner_dbapi.batch_dml_executor import BatchMode, BatchDmlExecutor @@ -720,6 +722,7 @@ def connect( user_agent=None, client=None, route_to_leader_enabled=True, + database_role=None, **kwargs, ): """Creates a connection to a Google Cloud Spanner database. @@ -763,6 +766,10 @@ def connect( disable leader aware routing. Disabling leader aware routing would route all requests in RW/PDML transactions to the closest region. + :type database_role: str + :param database_role: (Optional) The database role to connect as when using + fine-grained access controls. + **kwargs: Initial value for connection variables. @@ -784,11 +791,15 @@ def connect( route_to_leader_enabled=route_to_leader_enabled, ) else: + client_options = None + if isinstance(credentials, AnonymousCredentials): + client_options = kwargs.get("client_options") client = spanner.Client( project=project, credentials=credentials, client_info=client_info, route_to_leader_enabled=route_to_leader_enabled, + client_options=client_options, ) else: if project is not None and client.project != project: @@ -797,8 +808,10 @@ def connect( instance = client.instance(instance_id) database = None if database_id: - database = instance.database(database_id, pool=pool) - conn = Connection(instance, database) + database = instance.database( + database_id, pool=pool, database_role=database_role + ) + conn = Connection(instance, database, **kwargs) if pool is not None: conn._own_pool = False diff --git a/google/cloud/spanner_dbapi/transaction_helper.py b/google/cloud/spanner_dbapi/transaction_helper.py index f8f5bfa584..744aeb7b43 100644 --- a/google/cloud/spanner_dbapi/transaction_helper.py +++ b/google/cloud/spanner_dbapi/transaction_helper.py @@ -162,7 +162,7 @@ def add_execute_statement_for_retry( self._last_statement_details_per_cursor[cursor] = last_statement_result_details self._statement_result_details_list.append(last_statement_result_details) - def retry_transaction(self): + def retry_transaction(self, default_retry_delay=None): """Retry the aborted transaction. All the statements executed in the original transaction @@ -202,7 +202,9 @@ def retry_transaction(self): raise RetryAborted(RETRY_ABORTED_ERROR, ex) return except Aborted as ex: - delay = _get_retry_delay(ex.errors[0], attempt) + delay = _get_retry_delay( + ex.errors[0], attempt, default_retry_delay=default_retry_delay + ) if delay: time.sleep(delay) diff --git a/google/cloud/spanner_v1/_helpers.py b/google/cloud/spanner_v1/_helpers.py index 73a7679a6e..7b86a5653f 100644 --- a/google/cloud/spanner_v1/_helpers.py +++ b/google/cloud/spanner_v1/_helpers.py @@ -510,6 +510,7 @@ def _metadata_with_prefix(prefix, **kw): def _retry_on_aborted_exception( func, deadline, + default_retry_delay=None, ): """ Handles retry logic for Aborted exceptions, considering the deadline. 
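A hedged usage sketch for the `database_role` parameter that `connect()` gains above; the instance, database, and role names are placeholders, and the role must already exist in the database's fine-grained access controls:

from google.cloud.spanner_dbapi import connect

conn = connect(
    "my-instance-id",
    "my-database-id",
    database_role="my-database-role",  # hypothetical fine-grained access role
)
cursor = conn.cursor()
cursor.execute("SELECT 1")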
@@ -520,7 +521,12 @@ def _retry_on_aborted_exception( attempts += 1 return func() except Aborted as exc: - _delay_until_retry(exc, deadline=deadline, attempts=attempts) + _delay_until_retry( + exc, + deadline=deadline, + attempts=attempts, + default_retry_delay=default_retry_delay, + ) continue @@ -603,12 +609,12 @@ def _metadata_with_span_context(metadata: List[Tuple[str, str]], **kw) -> None: Returns: None """ - if HAS_OPENTELEMETRY_INSTALLED: + if HAS_OPENTELEMETRY_INSTALLED and metadata is not None: metadata.append(("x-goog-spanner-end-to-end-tracing", "true")) inject(setter=OpenTelemetryContextSetter(), carrier=metadata) -def _delay_until_retry(exc, deadline, attempts): +def _delay_until_retry(exc, deadline, attempts, default_retry_delay=None): """Helper for :meth:`Session.run_in_transaction`. Detect retryable abort, and impose server-supplied delay. @@ -628,7 +634,7 @@ def _delay_until_retry(exc, deadline, attempts): if now >= deadline: raise - delay = _get_retry_delay(cause, attempts) + delay = _get_retry_delay(cause, attempts, default_retry_delay=default_retry_delay) if delay is not None: if now + delay > deadline: raise @@ -636,7 +642,7 @@ def _delay_until_retry(exc, deadline, attempts): time.sleep(delay) -def _get_retry_delay(cause, attempts): +def _get_retry_delay(cause, attempts, default_retry_delay=None): """Helper for :func:`_delay_until_retry`. :type exc: :class:`grpc.Call` @@ -658,6 +664,8 @@ def _get_retry_delay(cause, attempts): retry_info.ParseFromString(retry_info_pb) nanos = retry_info.retry_delay.nanos return retry_info.retry_delay.seconds + nanos / 1.0e9 + if default_retry_delay is not None: + return default_retry_delay return 2**attempts + random.random() @@ -699,6 +707,10 @@ def __radd__(self, n): """ return self.__add__(n) + def reset(self): + with self.__lock: + self.__value = 0 + def _metadata_with_request_id(*args, **kwargs): return with_request_id(*args, **kwargs) diff --git a/google/cloud/spanner_v1/batch.py b/google/cloud/spanner_v1/batch.py index 39e29d4d41..2194cb9c0d 100644 --- a/google/cloud/spanner_v1/batch.py +++ b/google/cloud/spanner_v1/batch.py @@ -26,6 +26,7 @@ _metadata_with_prefix, _metadata_with_leader_aware_routing, _merge_Transaction_Options, + AtomicCounter, ) from google.cloud.spanner_v1._opentelemetry_tracing import trace_call from google.cloud.spanner_v1 import RequestOptions @@ -248,18 +249,32 @@ def commit( trace_attributes, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): - method = functools.partial( - api.commit, - request=request, - metadata=metadata, - ) + ) as span, MetricsCapture(): + + def wrapped_method(*args, **kwargs): + method = functools.partial( + api.commit, + request=request, + metadata=database.metadata_with_request_id( + # This code is retried due to ABORTED, hence nth_request + # should be increased. attempt can only be increased if + # we encounter UNAVAILABLE or INTERNAL. 
+ getattr(database, "_next_nth_request", 0), + 1, + metadata, + span, + ), + ) + return method(*args, **kwargs) + deadline = time.time() + kwargs.get( "timeout_secs", DEFAULT_RETRY_TIMEOUT_SECS ) + default_retry_delay = kwargs.get("default_retry_delay", None) response = _retry_on_aborted_exception( - method, + wrapped_method, deadline=deadline, + default_retry_delay=default_retry_delay, ) self.committed = response.commit_timestamp self.commit_stats = response.commit_stats @@ -371,14 +386,25 @@ def batch_write(self, request_options=None, exclude_txn_from_change_streams=Fals trace_attributes, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): - method = functools.partial( - api.batch_write, - request=request, - metadata=metadata, - ) + ) as span, MetricsCapture(): + attempt = AtomicCounter(0) + nth_request = getattr(database, "_next_nth_request", 0) + + def wrapped_method(*args, **kwargs): + method = functools.partial( + api.batch_write, + request=request, + metadata=database.metadata_with_request_id( + nth_request, + attempt.increment(), + metadata, + span, + ), + ) + return method(*args, **kwargs) + response = _retry( - method, + wrapped_method, allowed_exceptions={ InternalServerError: _check_rst_stream_error, }, diff --git a/google/cloud/spanner_v1/client.py b/google/cloud/spanner_v1/client.py index e201f93e9b..e0e8c44058 100644 --- a/google/cloud/spanner_v1/client.py +++ b/google/cloud/spanner_v1/client.py @@ -70,6 +70,7 @@ except ImportError: # pragma: NO COVER HAS_GOOGLE_CLOUD_MONITORING_INSTALLED = False +from google.cloud.spanner_v1._helpers import AtomicCounter _CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__) EMULATOR_ENV_VAR = "SPANNER_EMULATOR_HOST" @@ -182,6 +183,8 @@ class Client(ClientWithProject): SCOPE = (SPANNER_ADMIN_SCOPE,) """The scopes required for Google Cloud Spanner.""" + NTH_CLIENT = AtomicCounter() + def __init__( self, project=None, @@ -241,7 +244,9 @@ def __init__( meter_provider = MeterProvider( metric_readers=[ PeriodicExportingMetricReader( - CloudMonitoringMetricsExporter(), + CloudMonitoringMetricsExporter( + project_id=project, credentials=credentials + ), export_interval_millis=METRIC_EXPORT_INTERVAL_MS, ) ] @@ -261,6 +266,12 @@ def __init__( "default_transaction_options must be an instance of DefaultTransactionOptions" ) self._default_transaction_options = default_transaction_options + self._nth_client_id = Client.NTH_CLIENT.increment() + self._nth_request = AtomicCounter(0) + + @property + def _next_nth_request(self): + return self._nth_request.increment() @property def credentials(self): diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index 03c6e5119f..1273e016da 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -53,6 +53,7 @@ from google.cloud.spanner_v1._helpers import ( _metadata_with_prefix, _metadata_with_leader_aware_routing, + _metadata_with_request_id, ) from google.cloud.spanner_v1.batch import Batch from google.cloud.spanner_v1.batch import MutationGroups @@ -61,6 +62,8 @@ from google.cloud.spanner_v1.pool import BurstyPool from google.cloud.spanner_v1.pool import SessionCheckout from google.cloud.spanner_v1.session import Session +from google.cloud.spanner_v1.session_options import SessionOptions +from google.cloud.spanner_v1.database_sessions_manager import DatabaseSessionsManager from google.cloud.spanner_v1.snapshot import _restart_on_unavailable from google.cloud.spanner_v1.snapshot import 
Snapshot from google.cloud.spanner_v1.streamed import StreamedResultSet @@ -151,6 +154,9 @@ class Database(object): _spanner_api: SpannerClient = None + __transport_lock = threading.Lock() + __transports_to_channel_id = dict() + def __init__( self, database_id, @@ -188,6 +194,7 @@ def __init__( self._instance._client.default_transaction_options ) self._proto_descriptors = proto_descriptors + self._channel_id = 0 # It'll be created when _spanner_api is created. if pool is None: pool = BurstyPool(database_role=database_role) @@ -195,6 +202,9 @@ def __init__( self._pool = pool pool.bind(self) + self.session_options = SessionOptions() + self._sessions_manager = DatabaseSessionsManager(self, pool) + @classmethod def from_pb(cls, database_pb, instance, pool=None): """Creates an instance of this class from a protobuf. @@ -446,8 +456,32 @@ def spanner_api(self): client_info=client_info, client_options=client_options, ) + + with self.__transport_lock: + transport = self._spanner_api._transport + channel_id = self.__transports_to_channel_id.get(transport, None) + if channel_id is None: + channel_id = len(self.__transports_to_channel_id) + 1 + self.__transports_to_channel_id[transport] = channel_id + self._channel_id = channel_id + return self._spanner_api + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + if span is None: + span = get_current_span() + + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented @@ -490,7 +524,10 @@ def create(self): database_dialect=self._database_dialect, proto_descriptors=self._proto_descriptors, ) - future = api.create_database(request=request, metadata=metadata) + future = api.create_database( + request=request, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) return future def exists(self): @@ -506,7 +543,12 @@ def exists(self): metadata = _metadata_with_prefix(self.name) try: - api.get_database_ddl(database=self.name, metadata=metadata) + api.get_database_ddl( + database=self.name, + metadata=self.metadata_with_request_id( + self._next_nth_request, 1, metadata + ), + ) except NotFound: return False return True @@ -523,10 +565,16 @@ def reload(self): """ api = self._instance._client.database_admin_api metadata = _metadata_with_prefix(self.name) - response = api.get_database_ddl(database=self.name, metadata=metadata) + response = api.get_database_ddl( + database=self.name, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) self._ddl_statements = tuple(response.statements) self._proto_descriptors = response.proto_descriptors - response = api.get_database(name=self.name, metadata=metadata) + response = api.get_database( + name=self.name, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) self._state = DatabasePB.State(response.state) self._create_time = response.create_time self._restore_info = response.restore_info @@ -571,7 +619,10 @@ def update_ddl(self, ddl_statements, operation_id="", proto_descriptors=None): proto_descriptors=proto_descriptors, ) - future = api.update_database_ddl(request=request, metadata=metadata) + future = api.update_database_ddl( + request=request, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) return future def update(self, fields): @@ -609,7 +660,9 @@ def update(self, fields): metadata = 
_metadata_with_prefix(self.name) future = api.update_database( - database=database_pb, update_mask=field_mask, metadata=metadata + database=database_pb, + update_mask=field_mask, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), ) return future @@ -622,7 +675,10 @@ def drop(self): """ api = self._instance._client.database_admin_api metadata = _metadata_with_prefix(self.name) - api.drop_database(database=self.name, metadata=metadata) + api.drop_database( + database=self.name, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) def execute_partitioned_dml( self, @@ -708,10 +764,22 @@ def execute_pdml(): "CloudSpanner.Database.execute_partitioned_pdml", observability_options=self.observability_options, ) as span, MetricsCapture(): - with SessionCheckout(self._pool) as session: + from google.cloud.spanner_v1.session_options import TransactionType + + session = self._sessions_manager.get_session( + TransactionType.PARTITIONED + ) + try: add_span_event(span, "Starting BeginTransaction") txn = api.begin_transaction( - session=session.name, options=txn_options, metadata=metadata + session=session.name, + options=txn_options, + metadata=self.metadata_with_request_id( + self._next_nth_request, + 1, + metadata, + span, + ), ) txn_selector = TransactionSelector(id=txn.id) @@ -724,6 +792,7 @@ def execute_pdml(): query_options=query_options, request_options=request_options, ) + method = functools.partial( api.execute_streaming_sql, metadata=metadata, @@ -736,15 +805,30 @@ def execute_pdml(): metadata=metadata, transaction_selector=txn_selector, observability_options=self.observability_options, + request_id_manager=self, ) result_set = StreamedResultSet(iterator) list(result_set) # consume all partials return result_set.stats.row_count_lower_bound + finally: + self._sessions_manager.put_session(session) return _retry_on_aborted(execute_pdml, DEFAULT_RETRY_BACKOFF)() + @property + def _next_nth_request(self): + if self._instance and self._instance._client: + return self._instance._client._next_nth_request + return 1 + + @property + def _nth_client_id(self): + if self._instance and self._instance._client: + return self._instance._client._nth_client_id + return 0 + def session(self, labels=None, database_role=None): """Factory to create a session for this database. @@ -965,7 +1049,7 @@ def restore(self, source): ) future = api.restore_database( request=request, - metadata=metadata, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), ) return future @@ -1034,7 +1118,10 @@ def list_database_roles(self, page_size=None): parent=self.name, page_size=page_size, ) - return api.list_database_roles(request=request, metadata=metadata) + return api.list_database_roles( + request=request, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) def table(self, table_id): """Factory to create a table object within this database. 
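The `metadata_with_request_id` calls threaded through the admin methods above attach an `x-goog-spanner-request-id` header derived from a per-client atomic request counter. A simplified, self-contained sketch of that header's shape, assuming the six-part format defined later in `request_id_header.py` (the process id here is a placeholder; the real one is a random uint64 generated per process):

import threading

class AtomicCounter:
    # Minimal thread-safe counter mirroring the client helper.
    def __init__(self, start_value=0):
        self._lock = threading.Lock()
        self._value = start_value

    def increment(self, step=1):
        with self._lock:
            self._value += step
            return self._value

REQ_ID_VERSION = 1        # header format version
RAND_PROCESS_ID = 424242  # placeholder for the per-process random id
nth_request = AtomicCounter()

def metadata_with_request_id(client_id, channel_id, attempt, other_metadata=()):
    req_id = f"{REQ_ID_VERSION}.{RAND_PROCESS_ID}.{client_id}.{channel_id}.{nth_request.increment()}.{attempt}"
    return list(other_metadata) + [("x-goog-spanner-request-id", req_id)]

print(metadata_with_request_id(client_id=1, channel_id=1, attempt=1))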
@@ -1118,7 +1205,10 @@ def get_iam_policy(self, policy_version=None): requested_policy_version=policy_version ), ) - response = api.get_iam_policy(request=request, metadata=metadata) + response = api.get_iam_policy( + request=request, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) return response def set_iam_policy(self, policy): @@ -1140,7 +1230,10 @@ def set_iam_policy(self, policy): resource=self.name, policy=policy, ) - response = api.set_iam_policy(request=request, metadata=metadata) + response = api.set_iam_policy( + request=request, + metadata=self.metadata_with_request_id(self._next_nth_request, 1, metadata), + ) return response @property @@ -1159,6 +1252,15 @@ def observability_options(self): opts["db_name"] = self.name return opts + @property + def sessions_manager(self): + """Returns the database sessions manager. + + :rtype: :class:`~google.cloud.spanner_v1.database_sessions_manager.DatabaseSessionsManager` + :returns: The sessions manager for this database. + """ + return self._sessions_manager + class BatchCheckout(object): """Context manager for using a batch from a database. @@ -1209,8 +1311,12 @@ def __init__( def __enter__(self): """Begin ``with`` block.""" + from google.cloud.spanner_v1.session_options import TransactionType + current_span = get_current_span() - session = self._session = self._database._pool.get() + session = self._session = self._database.sessions_manager.get_session( + TransactionType.READ_WRITE + ) add_span_event(current_span, "Using session", {"id": session.session_id}) batch = self._batch = Batch(session) if self._request_options.transaction_tag: @@ -1235,7 +1341,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): "CommitStats: {}".format(self._batch.commit_stats), extra={"commit_stats": self._batch.commit_stats}, ) - self._database._pool.put(self._session) + self._database.sessions_manager.put_session(self._session) current_span = get_current_span() add_span_event( current_span, @@ -1263,7 +1369,11 @@ def __init__(self, database): def __enter__(self): """Begin ``with`` block.""" - session = self._session = self._database._pool.get() + from google.cloud.spanner_v1.session_options import TransactionType + + session = self._session = self._database.sessions_manager.get_session( + TransactionType.READ_WRITE + ) return MutationGroups(session) def __exit__(self, exc_type, exc_val, exc_tb): @@ -1274,7 +1384,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): if not self._session.exists(): self._session = self._database._pool._new_session() self._session.create() - self._database._pool.put(self._session) + self._database.sessions_manager.put_session(self._session) class SnapshotCheckout(object): @@ -1302,7 +1412,11 @@ def __init__(self, database, **kw): def __enter__(self): """Begin ``with`` block.""" - session = self._session = self._database._pool.get() + from google.cloud.spanner_v1.session_options import TransactionType + + session = self._session = self._database.sessions_manager.get_session( + TransactionType.READ_ONLY + ) return Snapshot(session, **self._kw) def __exit__(self, exc_type, exc_val, exc_tb): @@ -1313,7 +1427,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): if not self._session.exists(): self._session = self._database._pool._new_session() self._session.create() - self._database._pool.put(self._session) + self._database.sessions_manager.put_session(self._session) class BatchSnapshot(object): @@ -1393,10 +1507,13 @@ def _get_session(self): all partitions have been processed. 
""" if self._session is None: - session = self._session = self._database.session() - if self._session_id is None: - session.create() - else: + from google.cloud.spanner_v1.session_options import TransactionType + + # Use sessions manager for partition operations + session = self._session = self._database.sessions_manager.get_session( + TransactionType.PARTITIONED + ) + if self._session_id is not None: session._session_id = self._session_id return self._session @@ -1807,7 +1924,8 @@ def close(self): from all the partitions. """ if self._session is not None: - self._session.delete() + if not self._session.is_multiplexed: + self._session.delete() def _check_ddl_statements(value): diff --git a/google/cloud/spanner_v1/database_sessions_manager.py b/google/cloud/spanner_v1/database_sessions_manager.py new file mode 100644 index 0000000000..d9a0c06f52 --- /dev/null +++ b/google/cloud/spanner_v1/database_sessions_manager.py @@ -0,0 +1,249 @@ +# Copyright 2025 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import datetime +import threading +import time +import weakref + +from google.api_core.exceptions import MethodNotImplemented + +from google.cloud.spanner_v1._opentelemetry_tracing import ( + get_current_span, + add_span_event, +) +from google.cloud.spanner_v1.session import Session +from google.cloud.spanner_v1.session_options import TransactionType + + +class DatabaseSessionsManager(object): + """Manages sessions for a Cloud Spanner database. + Sessions can be checked out from the database session manager for a specific + transaction type using :meth:`get_session`, and returned to the session manager + using :meth:`put_session`. + The sessions returned by the session manager depend on the client's session options (see + :class:`~google.cloud.spanner_v1.session_options.SessionOptions`) and the provided session + pool (see :class:`~google.cloud.spanner_v1.pool.AbstractSessionPool`). + :type database: :class:`~google.cloud.spanner_v1.database.Database` + :param database: The database to manage sessions for. + :type pool: :class:`~google.cloud.spanner_v1.pool.AbstractSessionPool` + :param pool: The pool to get non-multiplexed sessions from. + """ + + # Intervals for the maintenance thread to check and refresh the multiplexed session. + _MAINTENANCE_THREAD_POLLING_INTERVAL = datetime.timedelta(minutes=10) + _MAINTENANCE_THREAD_REFRESH_INTERVAL = datetime.timedelta(days=7) + + def __init__(self, database, pool): + self._database = database + self._pool = pool + + # Declare multiplexed session attributes. When a multiplexed session for the + # database session manager is created, a maintenance thread is initialized to + # periodically delete and recreate the multiplexed session so that it remains + # valid. Because of this concurrency, we need to use a lock whenever we access + # the multiplexed session to avoid any race conditions. We also create an event + # so that the thread can terminate if the use of multiplexed session has been + # disabled for all transactions. 
+ self._multiplexed_session = None + self._multiplexed_session_maintenance_thread = None + self._multiplexed_session_lock = threading.Lock() + self._is_multiplexed_sessions_disabled_event = threading.Event() + + @property + def _logger(self): + """The logger used by this database session manager. + + :rtype: :class:`logging.Logger` + :returns: The logger. + """ + return self._database.logger + + def get_session(self, transaction_type: TransactionType) -> Session: + """Returns a session for the given transaction type from the database session manager. + :rtype: :class:`~google.cloud.spanner_v1.session.Session` + :returns: a session for the given transaction type. + """ + + session_options = self._database.session_options + use_multiplexed = session_options.use_multiplexed(transaction_type) + + if use_multiplexed and transaction_type == TransactionType.READ_WRITE: + raise NotImplementedError( + f"Multiplexed sessions are not yet supported for {transaction_type} transactions." + ) + + if use_multiplexed: + try: + session = self._get_multiplexed_session() + + # If multiplexed sessions are not supported, disable + # them for all transactions and return a non-multiplexed session. + except MethodNotImplemented: + self._disable_multiplexed_sessions() + session = self._pool.get() + + else: + session = self._pool.get() + + add_span_event( + get_current_span(), + "Using session", + {"id": session.session_id, "multiplexed": session.is_multiplexed}, + ) + + return session + + def put_session(self, session: Session) -> None: + """Returns the session to the database session manager. + :type session: :class:`~google.cloud.spanner_v1.session.Session` + :param session: The session to return to the database session manager. + """ + + add_span_event( + get_current_span(), + "Returning session", + {"id": session.session_id, "multiplexed": session.is_multiplexed}, + ) + + # No action is needed for multiplexed sessions: the session + # pool is only used for managing non-multiplexed sessions, + # since they can only process one transaction at a time. + if not session.is_multiplexed: + self._pool.put(session) + + def _get_multiplexed_session(self) -> Session: + """Returns a multiplexed session from the database session manager. + If the multiplexed session is not defined, creates a new multiplexed + session and starts a maintenance thread to periodically delete and + recreate it so that it remains valid. Otherwise, simply returns the + current multiplexed session. + :raises MethodNotImplemented: + if multiplexed sessions are not supported. + :rtype: :class:`~google.cloud.spanner_v1.session.Session` + :returns: a multiplexed session. + """ + + with self._multiplexed_session_lock: + if self._multiplexed_session is None: + self._multiplexed_session = self._build_multiplexed_session() + + # Build and start a thread to maintain the multiplexed session. + self._multiplexed_session_maintenance_thread = ( + self._build_maintenance_thread() + ) + self._multiplexed_session_maintenance_thread.start() + + return self._multiplexed_session + + def _build_multiplexed_session(self) -> Session: + """Builds and returns a new multiplexed session for the database session manager. + :raises MethodNotImplemented: + if multiplexed sessions are not supported. + :rtype: :class:`~google.cloud.spanner_v1.session.Session` + :returns: a new multiplexed session. 
+ """ + + session = Session( + database=self._database, + database_role=self._database.database_role, + is_multiplexed=True, + ) + + session.create() + + self._logger.info("Created multiplexed session.") + + return session + + def _disable_multiplexed_sessions(self) -> None: + """Disables multiplexed sessions for all transactions.""" + + self._multiplexed_session = None + self._is_multiplexed_sessions_disabled_event.set() + self._database.session_options.disable_multiplexed(self._logger) + + def _build_maintenance_thread(self) -> threading.Thread: + """Builds and returns a multiplexed session maintenance thread for + the database session manager. This thread will periodically delete + and recreate the multiplexed session to ensure that it is always valid. + :rtype: :class:`threading.Thread` + :returns: a multiplexed session maintenance thread. + """ + + # Use a weak reference to the database session manager to avoid + # creating a circular reference that would prevent the database + # session manager from being garbage collected. + session_manager_ref = weakref.ref(self) + + return threading.Thread( + target=self._maintain_multiplexed_session, + name=f"maintenance-multiplexed-session-{self._multiplexed_session.name}", + args=[session_manager_ref], + daemon=True, + ) + + @staticmethod + def _maintain_multiplexed_session(session_manager_ref) -> None: + """Maintains the multiplexed session for the database session manager. + This method will delete and recreate the referenced database session manager's + multiplexed session to ensure that it is always valid. The method will run until + the database session manager is deleted, the multiplexed session is deleted, or + building a multiplexed session fails. + :type session_manager_ref: :class:`_weakref.ReferenceType` + :param session_manager_ref: A weak reference to the database session manager. + """ + + session_manager = session_manager_ref() + if session_manager is None: + return + + polling_interval_seconds = ( + session_manager._MAINTENANCE_THREAD_POLLING_INTERVAL.total_seconds() + ) + refresh_interval_seconds = ( + session_manager._MAINTENANCE_THREAD_REFRESH_INTERVAL.total_seconds() + ) + + session_created_time = time.time() + + while True: + # Terminate the thread is the database session manager has been deleted. + session_manager = session_manager_ref() + if session_manager is None: + return + + # Terminate the thread if the use of multiplexed sessions has been disabled. + if session_manager._is_multiplexed_sessions_disabled_event.is_set(): + return + + # Wait for until the refresh interval has elapsed. + if time.time() - session_created_time < refresh_interval_seconds: + time.sleep(polling_interval_seconds) + continue + + with session_manager._multiplexed_session_lock: + session_manager._multiplexed_session.delete() + + try: + session_manager._multiplexed_session = ( + session_manager._build_multiplexed_session() + ) + + # Disable multiplexed sessions for all transactions and terminate + # the thread if building a multiplexed session fails. + except MethodNotImplemented: + session_manager._disable_multiplexed_sessions() + return + + session_created_time = time.time() diff --git a/google/cloud/spanner_v1/gapic_version.py b/google/cloud/spanner_v1/gapic_version.py index 9f7e08d550..b7c2622867 100644 --- a/google/cloud/spanner_v1/gapic_version.py +++ b/google/cloud/spanner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.54.0" # {x-release-please-version} +__version__ = "3.55.0" # {x-release-please-version} diff --git a/google/cloud/spanner_v1/metrics/metrics_exporter.py b/google/cloud/spanner_v1/metrics/metrics_exporter.py index e10cf6a2f1..68da08b400 100644 --- a/google/cloud/spanner_v1/metrics/metrics_exporter.py +++ b/google/cloud/spanner_v1/metrics/metrics_exporter.py @@ -26,6 +26,7 @@ from typing import Optional, List, Union, NoReturn, Tuple, Dict import google.auth +from google.auth import credentials as ga_credentials from google.api.distribution_pb2 import ( # pylint: disable=no-name-in-module Distribution, ) @@ -111,6 +112,7 @@ def __init__( self, project_id: Optional[str] = None, client: Optional["MetricServiceClient"] = None, + credentials: Optional[ga_credentials.Credentials] = None, ): """Initialize a custom exporter to send metrics for the Spanner Service Metrics.""" # Default preferred_temporality is all CUMULATIVE so need to customize @@ -121,6 +123,7 @@ def __init__( transport=MetricServiceGrpcTransport( channel=MetricServiceGrpcTransport.create_channel( options=_OPTIONS, + credentials=credentials, ) ) ) diff --git a/google/cloud/spanner_v1/pool.py b/google/cloud/spanner_v1/pool.py index 0c4dd5a63b..1c82f66ed0 100644 --- a/google/cloud/spanner_v1/pool.py +++ b/google/cloud/spanner_v1/pool.py @@ -256,7 +256,12 @@ def bind(self, database): ) resp = api.batch_create_sessions( request=request, - metadata=metadata, + metadata=database.metadata_with_request_id( + database._next_nth_request, + 1, + metadata, + span, + ), ) add_span_event( @@ -444,6 +449,7 @@ def put(self, session): self._sessions.put_nowait(session) except queue.Full: try: + # Sessions from pools are never multiplexed, so we can always delete them session.delete() except NotFound: pass @@ -561,7 +567,12 @@ def bind(self, database): while returned_session_count < self.size: resp = api.batch_create_sessions( request=request, - metadata=metadata, + metadata=database.metadata_with_request_id( + database._next_nth_request, + 1, + metadata, + span, + ), ) add_span_event( diff --git a/google/cloud/spanner_v1/request_id_header.py b/google/cloud/spanner_v1/request_id_header.py index 8376778273..c095bc88e2 100644 --- a/google/cloud/spanner_v1/request_id_header.py +++ b/google/cloud/spanner_v1/request_id_header.py @@ -33,10 +33,32 @@ def generate_rand_uint64(): REQ_RAND_PROCESS_ID = generate_rand_uint64() +X_GOOG_SPANNER_REQUEST_ID_SPAN_ATTR = "x_goog_spanner_request_id" -def with_request_id(client_id, channel_id, nth_request, attempt, other_metadata=[]): +def with_request_id( + client_id, channel_id, nth_request, attempt, other_metadata=[], span=None +): req_id = f"{REQ_ID_VERSION}.{REQ_RAND_PROCESS_ID}.{client_id}.{channel_id}.{nth_request}.{attempt}" - all_metadata = other_metadata.copy() + all_metadata = (other_metadata or []).copy() all_metadata.append((REQ_ID_HEADER_KEY, req_id)) + + if span is not None: + span.set_attribute(X_GOOG_SPANNER_REQUEST_ID_SPAN_ATTR, req_id) + return all_metadata + + +def parse_request_id(request_id_str): + splits = request_id_str.split(".") + version, rand_process_id, client_id, channel_id, nth_request, nth_attempt = list( + map(lambda v: int(v), splits) + ) + return ( + version, + rand_process_id, + client_id, + channel_id, + nth_request, + nth_attempt, + ) diff --git a/google/cloud/spanner_v1/services/__init__.py b/google/cloud/spanner_v1/services/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/google/cloud/spanner_v1/services/__init__.py +++ 
b/google/cloud/spanner_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/services/spanner/__init__.py b/google/cloud/spanner_v1/services/spanner/__init__.py index e8184d7477..3af41fdc08 100644 --- a/google/cloud/spanner_v1/services/spanner/__init__.py +++ b/google/cloud/spanner_v1/services/spanner/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index a8bdb5ee4c..fbacbddcce 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -39,6 +39,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -374,7 +375,10 @@ async def sample_create_session(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -500,7 +504,10 @@ async def sample_batch_create_sessions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, session_count]) + flattened_params = [database, session_count] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -610,7 +617,10 @@ async def sample_get_session(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -722,7 +732,10 @@ async def sample_list_sessions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -834,7 +847,10 @@ async def sample_delete_session(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1490,7 +1506,10 @@ async def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, options]) + flattened_params = [session, options] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1650,8 +1669,9 @@ async def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [session, transaction_id, mutations, single_use_transaction] + flattened_params = [session, transaction_id, mutations, single_use_transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -1773,7 +1793,10 @@ async def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, transaction_id]) + flattened_params = [session, transaction_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2116,7 +2139,10 @@ async def sample_batch_write(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([session, mutation_groups]) + flattened_params = [session, mutation_groups] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2172,5 +2198,8 @@ async def __aexit__(self, exc_type, exc, tb): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("SpannerAsyncClient",) diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index e0768ce742..e853b2dfd5 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -46,6 +46,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -67,6 +68,7 @@ from google.cloud.spanner_v1.types import result_set from google.cloud.spanner_v1.types import spanner from google.cloud.spanner_v1.types import transaction +from google.cloud.spanner_v1.metrics.metrics_interceptor import MetricsInterceptor from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -74,7 +76,6 @@ from .transports.grpc import SpannerGrpcTransport from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport from .transports.rest import SpannerRestTransport -from google.cloud.spanner_v1.metrics.metrics_interceptor import MetricsInterceptor class SpannerClientMeta(type): @@ -822,7 +823,10 @@ def sample_create_session(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -945,7 +949,10 @@ def sample_batch_create_sessions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, session_count]) + flattened_params = [database, session_count] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1052,7 +1059,10 @@ def sample_get_session(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1161,7 +1171,10 @@ def sample_list_sessions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) + flattened_params = [database] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1270,7 +1283,10 @@ def sample_delete_session(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1915,7 +1931,10 @@ def sample_begin_transaction(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, options]) + flattened_params = [session, options] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2072,8 +2091,9 @@ def sample_commit(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [session, transaction_id, mutations, single_use_transaction] + flattened_params = [session, transaction_id, mutations, single_use_transaction] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -2194,7 +2214,10 @@ def sample_rollback(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, transaction_id]) + flattened_params = [session, transaction_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2532,7 +2555,10 @@ def sample_batch_write(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([session, mutation_groups]) + flattened_params = [session, mutation_groups] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2592,5 +2618,7 @@ def __exit__(self, type, value, traceback): gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("SpannerClient",) diff --git a/google/cloud/spanner_v1/services/spanner/pagers.py b/google/cloud/spanner_v1/services/spanner/pagers.py index 2341e99378..90927b54ee 100644 --- a/google/cloud/spanner_v1/services/spanner/pagers.py +++ b/google/cloud/spanner_v1/services/spanner/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/google/cloud/spanner_v1/services/spanner/transports/__init__.py index e554f96a50..4442420c7f 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/__init__.py +++ b/google/cloud/spanner_v1/services/spanner/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py index 8fa85af24d..d1dfe07291 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.spanner_v1.types import commit_response from google.cloud.spanner_v1.types import result_set @@ -37,6 +38,9 @@ gapic_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class SpannerTransport(abc.ABC): """Abstract transport class for Spanner.""" diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index d325442dc9..148abd592a 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -34,7 +34,6 @@ from google.cloud.spanner_v1.types import result_set from google.cloud.spanner_v1.types import spanner from google.cloud.spanner_v1.types import transaction - from google.cloud.spanner_v1.metrics.metrics_interceptor import MetricsInterceptor from google.protobuf import empty_pb2 # type: ignore from .base import SpannerTransport, DEFAULT_CLIENT_INFO @@ -76,12 +75,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.spanner.v1.Spanner", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index 475717ae2a..86ac4915d7 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/services/spanner/transports/rest.py b/google/cloud/spanner_v1/services/spanner/transports/rest.py index 344416c265..7ad0a4e24e 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/rest.py +++ b/google/cloud/spanner_v1/services/spanner/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format @@ -39,6 +40,7 @@ from google.cloud.spanner_v1.metrics.metrics_interceptor import MetricsInterceptor from google.protobuf import empty_pb2 # type: ignore + from .rest_base import _BaseSpannerRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -62,6 +64,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class SpannerRestInterceptor: """Interceptor for Spanner. diff --git a/google/cloud/spanner_v1/services/spanner/transports/rest_base.py b/google/cloud/spanner_v1/services/spanner/transports/rest_base.py index 5dab9f539e..e93f5d4b58 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/rest_base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -28,6 +28,7 @@ from google.cloud.spanner_v1.types import result_set from google.cloud.spanner_v1.types import spanner from google.cloud.spanner_v1.types import transaction +from google.cloud.spanner_v1.metrics.metrics_interceptor import MetricsInterceptor from google.protobuf import empty_pb2 # type: ignore @@ -53,6 +54,7 @@ def __init__( always_use_jwt_access: Optional[bool] = False, url_scheme: str = "https", api_audience: Optional[str] = None, + metrics_interceptor: Optional[MetricsInterceptor] = None, ) -> None: """Instantiate the transport. Args: diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index f18ba57582..78db192f30 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -64,17 +64,21 @@ class Session(object): :type database_role: str :param database_role: (Optional) user-assigned database_role for the session. + + :type is_multiplexed: bool + :param is_multiplexed: (Optional) whether this session is a multiplexed session. """ _session_id = None _transaction = None - def __init__(self, database, labels=None, database_role=None): + def __init__(self, database, labels=None, database_role=None, is_multiplexed=False): self._database = database if labels is None: labels = {} self._labels = labels self._database_role = database_role + self._is_multiplexed = is_multiplexed self._last_use_time = datetime.utcnow() def __lt__(self, other): @@ -85,6 +89,15 @@ def session_id(self): """Read-only ID, set by the back-end during :meth:`create`.""" return self._session_id + @property + def is_multiplexed(self): + """Whether this session is a multiplexed session. + + :rtype: bool + :returns: True if this is a multiplexed session, False otherwise. + """ + return self._is_multiplexed + @property def last_use_time(self): """ "Approximate last use time of this session @@ -160,17 +173,31 @@ def create(self): if self._labels: request.session.labels = self._labels + # Set the multiplexed field for multiplexed sessions + if self._is_multiplexed: + request.session.multiplexed = True + observability_options = getattr(self._database, "observability_options", None) + span_name = ( + "CloudSpanner.CreateMultiplexedSession" + if self._is_multiplexed + else "CloudSpanner.CreateSession" + ) with trace_call( - "CloudSpanner.CreateSession", + span_name, self, self._labels, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): + ) as span, MetricsCapture(): session_pb = api.create_session( request=request, - metadata=metadata, + metadata=self._database.metadata_with_request_id( + self._database._next_nth_request, + 1, + metadata, + span, + ), ) self._session_id = session_pb.name.split("/")[-1] @@ -195,7 +222,8 @@ def exists(self): current_span, "Checking if Session exists", {"session.id": self._session_id} ) - api = self._database.spanner_api + database = self._database + api = database.spanner_api metadata = _metadata_with_prefix(self._database.name) if self._database._route_to_leader_enabled: metadata.append( @@ -212,7 +240,15 @@ def exists(self): metadata=metadata, ) as span, MetricsCapture(): try: - api.get_session(name=self.name, metadata=metadata) + api.get_session( + name=self.name, + metadata=database.metadata_with_request_id( + database._next_nth_request, + 1, + metadata, + span, + ), + ) if span: span.set_attribute("session_found", True) except NotFound: @@ -242,8 +278,9 @@ def delete(self): current_span, "Deleting Session", {"session.id": self._session_id} ) - api = self._database.spanner_api - 
metadata = _metadata_with_prefix(self._database.name) + database = self._database + api = database.spanner_api + metadata = _metadata_with_prefix(database.name) observability_options = getattr(self._database, "observability_options", None) with trace_call( "CloudSpanner.DeleteSession", @@ -254,8 +291,16 @@ def delete(self): }, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): - api.delete_session(name=self.name, metadata=metadata) + ) as span, MetricsCapture(): + api.delete_session( + name=self.name, + metadata=database.metadata_with_request_id( + database._next_nth_request, + 1, + metadata, + span, + ), + ) def ping(self): """Ping the session to keep it alive by executing "SELECT 1". @@ -264,10 +309,17 @@ def ping(self): """ if self._session_id is None: raise ValueError("Session ID not set by back-end") - api = self._database.spanner_api - metadata = _metadata_with_prefix(self._database.name) + database = self._database + api = database.spanner_api request = ExecuteSqlRequest(session=self.name, sql="SELECT 1") - api.execute_sql(request=request, metadata=metadata) + api.execute_sql( + request=request, + metadata=database.metadata_with_request_id( + database._next_nth_request, + 1, + _metadata_with_prefix(database.name), + ), + ) self._last_use_time = datetime.now() def snapshot(self, **kw): @@ -461,6 +513,7 @@ def run_in_transaction(self, func, *args, **kw): reraises any non-ABORT exceptions raised by ``func``. """ deadline = time.time() + kw.pop("timeout_secs", DEFAULT_RETRY_TIMEOUT_SECS) + default_retry_delay = kw.pop("default_retry_delay", None) commit_request_options = kw.pop("commit_request_options", None) max_commit_delay = kw.pop("max_commit_delay", None) transaction_tag = kw.pop("transaction_tag", None) @@ -502,7 +555,11 @@ def run_in_transaction(self, func, *args, **kw): except Aborted as exc: del self._transaction if span: - delay_seconds = _get_retry_delay(exc.errors[0], attempts) + delay_seconds = _get_retry_delay( + exc.errors[0], + attempts, + default_retry_delay=default_retry_delay, + ) attributes = dict(delay_seconds=delay_seconds, cause=str(exc)) attributes.update(span_attributes) add_span_event( @@ -511,7 +568,9 @@ def run_in_transaction(self, func, *args, **kw): attributes, ) - _delay_until_retry(exc, deadline, attempts) + _delay_until_retry( + exc, deadline, attempts, default_retry_delay=default_retry_delay + ) continue except GoogleAPICallError: del self._transaction @@ -539,7 +598,11 @@ def run_in_transaction(self, func, *args, **kw): except Aborted as exc: del self._transaction if span: - delay_seconds = _get_retry_delay(exc.errors[0], attempts) + delay_seconds = _get_retry_delay( + exc.errors[0], + attempts, + default_retry_delay=default_retry_delay, + ) attributes = dict(delay_seconds=delay_seconds) attributes.update(span_attributes) add_span_event( @@ -548,7 +611,9 @@ def run_in_transaction(self, func, *args, **kw): attributes, ) - _delay_until_retry(exc, deadline, attempts) + _delay_until_retry( + exc, deadline, attempts, default_retry_delay=default_retry_delay + ) except GoogleAPICallError: del self._transaction add_span_event( diff --git a/google/cloud/spanner_v1/session_options.py b/google/cloud/spanner_v1/session_options.py new file mode 100644 index 0000000000..12af15f8d1 --- /dev/null +++ b/google/cloud/spanner_v1/session_options.py @@ -0,0 +1,133 @@ +# Copyright 2025 Google LLC All rights reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+from enum import Enum
+from logging import Logger
+
+
+class TransactionType(Enum):
+    """Transaction types for session options."""
+
+    READ_ONLY = "read-only"
+    PARTITIONED = "partitioned"
+    READ_WRITE = "read/write"
+
+
+class SessionOptions(object):
+    """Represents the session options for the Cloud Spanner Python client.
+    We can use :class:`SessionOptions` to determine whether multiplexed sessions
+    should be used for a specific transaction type with :meth:`use_multiplexed`. The use
+    of multiplexed sessions can be disabled for a specific transaction type or for all
+    transaction types with :meth:`disable_multiplexed`.
+    """
+
+    # Environment variables for multiplexed sessions
+    ENV_VAR_ENABLE_MULTIPLEXED = "GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS"
+    ENV_VAR_ENABLE_MULTIPLEXED_FOR_PARTITIONED = (
+        "GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_PARTITIONED_OPS"
+    )
+    ENV_VAR_ENABLE_MULTIPLEXED_FOR_READ_WRITE = (
+        "GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW"
+    )
+
+    def __init__(self):
+        # Internal overrides to disable the use of multiplexed
+        # sessions in case of runtime errors.
+        self._is_multiplexed_enabled = {
+            TransactionType.READ_ONLY: True,
+            TransactionType.PARTITIONED: True,
+            TransactionType.READ_WRITE: True,
+        }
+
+    def use_multiplexed(self, transaction_type: TransactionType) -> bool:
+        """Returns whether to use multiplexed sessions for the given transaction type.
+        Multiplexed sessions are enabled for read-only transactions if:
+        * ENV_VAR_ENABLE_MULTIPLEXED is set to true; and
+        * multiplexed sessions have not been disabled for read-only transactions.
+        Multiplexed sessions are enabled for partitioned transactions if:
+        * ENV_VAR_ENABLE_MULTIPLEXED is set to true;
+        * ENV_VAR_ENABLE_MULTIPLEXED_FOR_PARTITIONED is set to true; and
+        * multiplexed sessions have not been disabled for partitioned transactions.
+        Multiplexed sessions are **currently disabled** for read/write transactions.
+        :type transaction_type: :class:`TransactionType`
+        :param transaction_type: the type of transaction for which to check whether
+            multiplexed sessions should be used.
+        """
+
+        if transaction_type is TransactionType.READ_ONLY:
+            return self._is_multiplexed_enabled[transaction_type] and self._getenv(
+                self.ENV_VAR_ENABLE_MULTIPLEXED
+            )
+
+        elif transaction_type is TransactionType.PARTITIONED:
+            return (
+                self._is_multiplexed_enabled[transaction_type]
+                and self._getenv(self.ENV_VAR_ENABLE_MULTIPLEXED)
+                and self._getenv(self.ENV_VAR_ENABLE_MULTIPLEXED_FOR_PARTITIONED)
+            )
+
+        elif transaction_type is TransactionType.READ_WRITE:
+            return False
+
+        raise ValueError(f"Transaction type {transaction_type} is not supported.")
+
+    def disable_multiplexed(
+        self, logger: Logger = None, transaction_type: TransactionType = None
+    ) -> None:
+        """Disables the use of multiplexed sessions for the given transaction type.
+        If no transaction type is specified, disables the use of multiplexed sessions
+        for all transaction types.
+        :type logger: :class:`Logger`
+        :param logger: logger to use for logging the disabling of multiplexed
+            sessions.
+        :type transaction_type: :class:`TransactionType`
+        :param transaction_type: (Optional) the type of transaction for which to disable
+            the use of multiplexed sessions.
+        """
+
+        disable_multiplexed_log_msg_fstring = (
+            "Disabling multiplexed sessions for {transaction_type_value} transactions"
+        )
+
+        import logging
+
+        if logger is None:
+            logger = logging.getLogger(__name__)
+
+        if transaction_type is None:
+            logger.warning(
+                disable_multiplexed_log_msg_fstring.format(transaction_type_value="all")
+            )
+            for transaction_type in TransactionType:
+                self._is_multiplexed_enabled[transaction_type] = False
+            return
+
+        elif transaction_type in self._is_multiplexed_enabled.keys():
+            logger.warning(
+                disable_multiplexed_log_msg_fstring.format(
+                    transaction_type_value=transaction_type.value
+                )
+            )
+            self._is_multiplexed_enabled[transaction_type] = False
+            return
+
+        raise ValueError(f"Transaction type '{transaction_type}' is not supported.")
+
+    @staticmethod
+    def _getenv(name: str) -> bool:
+        """Returns the value of the given environment variable as a boolean.
+        True values are '1' and 'true' (case-insensitive); all other values are
+        considered false.
+        """
+        env_var = os.getenv(name, "").lower().strip()
+        return env_var in ["1", "true"]
diff --git a/google/cloud/spanner_v1/snapshot.py b/google/cloud/spanner_v1/snapshot.py
index 3b18d2c855..b8131db18a 100644
--- a/google/cloud/spanner_v1/snapshot.py
+++ b/google/cloud/spanner_v1/snapshot.py
@@ -38,6 +38,7 @@
     _retry,
     _check_rst_stream_error,
     _SessionWrapper,
+    AtomicCounter,
 )
 from google.cloud.spanner_v1._opentelemetry_tracing import trace_call
 from google.cloud.spanner_v1.streamed import StreamedResultSet
@@ -61,6 +62,7 @@ def _restart_on_unavailable(
     transaction=None,
     transaction_selector=None,
     observability_options=None,
+    request_id_manager=None,
 ):
     """Restart iteration after :exc:`.ServiceUnavailable`.
 
@@ -90,6 +92,8 @@ def _restart_on_unavailable(
         request.transaction = transaction_selector
 
     iterator = None
+    attempt = 1
+    nth_request = getattr(request_id_manager, "_next_nth_request", 0)
 
     while True:
         try:
@@ -100,8 +104,16 @@
                 attributes,
                 observability_options=observability_options,
                 metadata=metadata,
-            ), MetricsCapture():
-                iterator = method(request=request, metadata=metadata)
+            ) as span, MetricsCapture():
+                iterator = method(
+                    request=request,
+                    metadata=request_id_manager.metadata_with_request_id(
+                        nth_request,
+                        attempt,
+                        metadata,
+                        span,
+                    ),
+                )
                 for item in iterator:
                     item_buffer.append(item)
                     # Setting the transaction id because the transaction begin was inlined for first rpc.
@@ -124,12 +136,21 @@ def _restart_on_unavailable( attributes, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): + ) as span, MetricsCapture(): request.resume_token = resume_token if transaction is not None: transaction_selector = transaction._make_txn_selector() request.transaction = transaction_selector - iterator = method(request=request) + attempt += 1 + iterator = method( + request=request, + metadata=request_id_manager.metadata_with_request_id( + nth_request, + attempt, + metadata, + span, + ), + ) continue except InternalServerError as exc: resumable_error = any( @@ -145,12 +166,21 @@ def _restart_on_unavailable( attributes, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): + ) as span, MetricsCapture(): request.resume_token = resume_token if transaction is not None: transaction_selector = transaction._make_txn_selector() + attempt += 1 request.transaction = transaction_selector - iterator = method(request=request) + iterator = method( + request=request, + metadata=request_id_manager.metadata_with_request_id( + nth_request, + attempt, + metadata, + span, + ), + ) continue if len(item_buffer) == 0: @@ -329,6 +359,7 @@ def read( data_boost_enabled=data_boost_enabled, directed_read_options=directed_read_options, ) + restart = functools.partial( api.streaming_read, request=request, @@ -352,6 +383,7 @@ def read( trace_attributes, transaction=self, observability_options=observability_options, + request_id_manager=self._session._database, ) self._read_request_count += 1 if self._multi_use: @@ -375,6 +407,7 @@ def read( trace_attributes, transaction=self, observability_options=observability_options, + request_id_manager=self._session._database, ) self._read_request_count += 1 @@ -562,13 +595,16 @@ def execute_sql( data_boost_enabled=data_boost_enabled, directed_read_options=directed_read_options, ) - restart = functools.partial( - api.execute_streaming_sql, - request=request, - metadata=metadata, - retry=retry, - timeout=timeout, - ) + + def wrapped_restart(*args, **kwargs): + restart = functools.partial( + api.execute_streaming_sql, + request=request, + metadata=kwargs.get("metadata", metadata), + retry=retry, + timeout=timeout, + ) + return restart(*args, **kwargs) trace_attributes = {"db.statement": sql} observability_options = getattr(database, "observability_options", None) @@ -577,7 +613,7 @@ def execute_sql( # lock is added to handle the inline begin for first rpc with self._lock: return self._get_streamed_result_set( - restart, + wrapped_restart, request, metadata, trace_attributes, @@ -587,7 +623,7 @@ def execute_sql( ) else: return self._get_streamed_result_set( - restart, + wrapped_restart, request, metadata, trace_attributes, @@ -615,6 +651,7 @@ def _get_streamed_result_set( trace_attributes, transaction=self, observability_options=observability_options, + request_id_manager=self._session._database, ) self._read_request_count += 1 self._execute_sql_count += 1 @@ -717,16 +754,28 @@ def partition_read( extra_attributes=trace_attributes, observability_options=getattr(database, "observability_options", None), metadata=metadata, - ), MetricsCapture(): - method = functools.partial( - api.partition_read, - request=request, - metadata=metadata, - retry=retry, - timeout=timeout, - ) + ) as span, MetricsCapture(): + nth_request = getattr(database, "_next_nth_request", 0) + attempt = AtomicCounter() + + def attempt_tracking_method(): + all_metadata = database.metadata_with_request_id( + nth_request, + 
attempt.increment(), + metadata, + span, + ) + method = functools.partial( + api.partition_read, + request=request, + metadata=all_metadata, + retry=retry, + timeout=timeout, + ) + return method() + response = _retry( - method, + attempt_tracking_method, allowed_exceptions={InternalServerError: _check_rst_stream_error}, ) @@ -821,16 +870,28 @@ def partition_query( trace_attributes, observability_options=getattr(database, "observability_options", None), metadata=metadata, - ), MetricsCapture(): - method = functools.partial( - api.partition_query, - request=request, - metadata=metadata, - retry=retry, - timeout=timeout, - ) + ) as span, MetricsCapture(): + nth_request = getattr(database, "_next_nth_request", 0) + attempt = AtomicCounter() + + def attempt_tracking_method(): + all_metadata = database.metadata_with_request_id( + nth_request, + attempt.increment(), + metadata, + span, + ) + method = functools.partial( + api.partition_query, + request=request, + metadata=all_metadata, + retry=retry, + timeout=timeout, + ) + return method() + response = _retry( - method, + attempt_tracking_method, allowed_exceptions={InternalServerError: _check_rst_stream_error}, ) @@ -968,15 +1029,27 @@ def begin(self): self._session, observability_options=getattr(database, "observability_options", None), metadata=metadata, - ), MetricsCapture(): - method = functools.partial( - api.begin_transaction, - session=self._session.name, - options=txn_selector.begin, - metadata=metadata, - ) + ) as span, MetricsCapture(): + nth_request = getattr(database, "_next_nth_request", 0) + attempt = AtomicCounter() + + def attempt_tracking_method(): + all_metadata = database.metadata_with_request_id( + nth_request, + attempt.increment(), + metadata, + span, + ) + method = functools.partial( + api.begin_transaction, + session=self._session.name, + options=txn_selector.begin, + metadata=all_metadata, + ) + return method() + response = _retry( - method, + attempt_tracking_method, allowed_exceptions={InternalServerError: _check_rst_stream_error}, ) self._transaction_id = response.id diff --git a/google/cloud/spanner_v1/testing/database_test.py b/google/cloud/spanner_v1/testing/database_test.py index 54afda11e0..5af89fea42 100644 --- a/google/cloud/spanner_v1/testing/database_test.py +++ b/google/cloud/spanner_v1/testing/database_test.py @@ -25,6 +25,7 @@ from google.cloud.spanner_v1.testing.interceptors import ( MethodCountInterceptor, MethodAbortInterceptor, + XGoogRequestIDHeaderInterceptor, ) @@ -34,6 +35,8 @@ class TestDatabase(Database): currently, and we don't want to make changes in the Database class for testing purpose as this is a hack to use interceptors in tests.""" + _interceptors = [] + def __init__( self, database_id, @@ -74,6 +77,8 @@ def spanner_api(self): client_options = client._client_options if self._instance.emulator_host is not None: channel = grpc.insecure_channel(self._instance.emulator_host) + self._x_goog_request_id_interceptor = XGoogRequestIDHeaderInterceptor() + self._interceptors.append(self._x_goog_request_id_interceptor) channel = grpc.intercept_channel(channel, *self._interceptors) transport = SpannerGrpcTransport(channel=channel) self._spanner_api = SpannerClient( @@ -110,3 +115,7 @@ def _create_spanner_client_for_tests(self, client_options, credentials): client_options=client_options, transport=transport, ) + + def reset(self): + if self._x_goog_request_id_interceptor: + self._x_goog_request_id_interceptor.reset() diff --git a/google/cloud/spanner_v1/testing/interceptors.py 
b/google/cloud/spanner_v1/testing/interceptors.py index a8b015a87d..fd05a6d4b3 100644 --- a/google/cloud/spanner_v1/testing/interceptors.py +++ b/google/cloud/spanner_v1/testing/interceptors.py @@ -13,8 +13,11 @@ # limitations under the License. from collections import defaultdict +import threading + from grpc_interceptor import ClientInterceptor from google.api_core.exceptions import Aborted +from google.cloud.spanner_v1.request_id_header import parse_request_id class MethodCountInterceptor(ClientInterceptor): @@ -63,3 +66,53 @@ def reset(self): self._method_to_abort = None self._count = 0 self._connection = None + + +X_GOOG_REQUEST_ID = "x-goog-spanner-request-id" + + +class XGoogRequestIDHeaderInterceptor(ClientInterceptor): + def __init__(self): + self._unary_req_segments = [] + self._stream_req_segments = [] + self.__lock = threading.Lock() + + def intercept(self, method, request_or_iterator, call_details): + metadata = call_details.metadata + x_goog_request_id = None + for key, value in metadata: + if key == X_GOOG_REQUEST_ID: + x_goog_request_id = value + break + + if not x_goog_request_id: + raise Exception( + f"Missing {X_GOOG_REQUEST_ID} header in {call_details.method}" + ) + + response_or_iterator = method(request_or_iterator, call_details) + streaming = getattr(response_or_iterator, "__iter__", None) is not None + + with self.__lock: + if streaming: + self._stream_req_segments.append( + (call_details.method, parse_request_id(x_goog_request_id)) + ) + else: + self._unary_req_segments.append( + (call_details.method, parse_request_id(x_goog_request_id)) + ) + + return response_or_iterator + + @property + def unary_request_ids(self): + return self._unary_req_segments + + @property + def stream_request_ids(self): + return self._stream_req_segments + + def reset(self): + self._stream_req_segments.clear() + self._unary_req_segments.clear() diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index f60dbbe72a..f8971a6098 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -22,8 +22,6 @@ from google.cloud.spanner_v1 import ( TransactionOptions, ResultSetMetadata, - ExecuteSqlRequest, - ExecuteBatchDmlRequest, ) from google.cloud.spanner_v1.testing.mock_database_admin import DatabaseAdminServicer import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc @@ -107,6 +105,7 @@ def CreateSession(self, request, context): def BatchCreateSessions(self, request, context): self._requests.append(request) + self.mock_spanner.pop_error(context) sessions = [] for i in range(request.session_count): sessions.append( @@ -186,9 +185,7 @@ def BeginTransaction(self, request, context): self._requests.append(request) return self.__create_transaction(request.session, request.options) - def __maybe_create_transaction( - self, request: ExecuteSqlRequest | ExecuteBatchDmlRequest - ): + def __maybe_create_transaction(self, request): started_transaction = None if not request.transaction.begin == TransactionOptions(): started_transaction = self.__create_transaction( diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py index 2f52aaa144..795e158f6a 100644 --- a/google/cloud/spanner_v1/transaction.py +++ b/google/cloud/spanner_v1/transaction.py @@ -32,6 +32,7 @@ from google.cloud.spanner_v1 import ExecuteSqlRequest from google.cloud.spanner_v1 import TransactionSelector from google.cloud.spanner_v1 import 
TransactionOptions +from google.cloud.spanner_v1._helpers import AtomicCounter from google.cloud.spanner_v1.snapshot import _SnapshotBase from google.cloud.spanner_v1.batch import _BatchBase from google.cloud.spanner_v1._opentelemetry_tracing import add_span_event, trace_call @@ -181,12 +182,22 @@ def begin(self): observability_options=observability_options, metadata=metadata, ) as span, MetricsCapture(): - method = functools.partial( - api.begin_transaction, - session=self._session.name, - options=txn_options, - metadata=metadata, - ) + attempt = AtomicCounter(0) + nth_request = database._next_nth_request + + def wrapped_method(*args, **kwargs): + method = functools.partial( + api.begin_transaction, + session=self._session.name, + options=txn_options, + metadata=database.metadata_with_request_id( + nth_request, + attempt.increment(), + metadata, + span, + ), + ) + return method(*args, **kwargs) def beforeNextRetry(nthRetry, delayInSeconds): add_span_event( @@ -196,7 +207,7 @@ def beforeNextRetry(nthRetry, delayInSeconds): ) response = _retry( - method, + wrapped_method, allowed_exceptions={InternalServerError: _check_rst_stream_error}, beforeNextRetry=beforeNextRetry, ) @@ -217,23 +228,37 @@ def rollback(self): database._route_to_leader_enabled ) ) + observability_options = getattr(database, "observability_options", None) with trace_call( f"CloudSpanner.{type(self).__name__}.rollback", self._session, observability_options=observability_options, metadata=metadata, - ), MetricsCapture(): - method = functools.partial( - api.rollback, - session=self._session.name, - transaction_id=self._transaction_id, - metadata=metadata, - ) + ) as span, MetricsCapture(): + attempt = AtomicCounter(0) + nth_request = database._next_nth_request + + def wrapped_method(*args, **kwargs): + attempt.increment() + method = functools.partial( + api.rollback, + session=self._session.name, + transaction_id=self._transaction_id, + metadata=database.metadata_with_request_id( + nth_request, + attempt.value, + metadata, + span, + ), + ) + return method(*args, **kwargs) + _retry( - method, + wrapped_method, allowed_exceptions={InternalServerError: _check_rst_stream_error}, ) + self.rolled_back = True del self._session._transaction @@ -306,11 +331,22 @@ def commit( add_span_event(span, "Starting Commit") - method = functools.partial( - api.commit, - request=request, - metadata=metadata, - ) + attempt = AtomicCounter(0) + nth_request = database._next_nth_request + + def wrapped_method(*args, **kwargs): + attempt.increment() + method = functools.partial( + api.commit, + request=request, + metadata=database.metadata_with_request_id( + nth_request, + attempt.value, + metadata, + span, + ), + ) + return method(*args, **kwargs) def beforeNextRetry(nthRetry, delayInSeconds): add_span_event( @@ -320,7 +356,7 @@ def beforeNextRetry(nthRetry, delayInSeconds): ) response = _retry( - method, + wrapped_method, allowed_exceptions={InternalServerError: _check_rst_stream_error}, beforeNextRetry=beforeNextRetry, ) @@ -469,19 +505,27 @@ def execute_update( last_statement=last_statement, ) - method = functools.partial( - api.execute_sql, - request=request, - metadata=metadata, - retry=retry, - timeout=timeout, - ) + nth_request = database._next_nth_request + attempt = AtomicCounter(0) + + def wrapped_method(*args, **kwargs): + attempt.increment() + method = functools.partial( + api.execute_sql, + request=request, + metadata=database.metadata_with_request_id( + nth_request, attempt.value, metadata + ), + retry=retry, + timeout=timeout, + ) + 
return method(*args, **kwargs) if self._transaction_id is None: # lock is added to handle the inline begin for first rpc with self._lock: response = self._execute_request( - method, + wrapped_method, request, metadata, f"CloudSpanner.{type(self).__name__}.execute_update", @@ -499,7 +543,7 @@ def execute_update( self._transaction_id = response.metadata.transaction.id else: response = self._execute_request( - method, + wrapped_method, request, metadata, f"CloudSpanner.{type(self).__name__}.execute_update", @@ -611,19 +655,27 @@ def batch_update( last_statements=last_statement, ) - method = functools.partial( - api.execute_batch_dml, - request=request, - metadata=metadata, - retry=retry, - timeout=timeout, - ) + nth_request = database._next_nth_request + attempt = AtomicCounter(0) + + def wrapped_method(*args, **kwargs): + attempt.increment() + method = functools.partial( + api.execute_batch_dml, + request=request, + metadata=database.metadata_with_request_id( + nth_request, attempt.value, metadata + ), + retry=retry, + timeout=timeout, + ) + return method(*args, **kwargs) if self._transaction_id is None: # lock is added to handle the inline begin for first rpc with self._lock: response = self._execute_request( - method, + wrapped_method, request, metadata, "CloudSpanner.DMLTransaction", @@ -642,7 +694,7 @@ def batch_update( break else: response = self._execute_request( - method, + wrapped_method, request, metadata, "CloudSpanner.DMLTransaction", diff --git a/google/cloud/spanner_v1/types/__init__.py b/google/cloud/spanner_v1/types/__init__.py index 364ed97e6d..afb030c504 100644 --- a/google/cloud/spanner_v1/types/__init__.py +++ b/google/cloud/spanner_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/types/commit_response.py b/google/cloud/spanner_v1/types/commit_response.py index 4e540e4dfc..2b0c504b6a 100644 --- a/google/cloud/spanner_v1/types/commit_response.py +++ b/google/cloud/spanner_v1/types/commit_response.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/types/keys.py b/google/cloud/spanner_v1/types/keys.py index 78d246cc16..15272ab689 100644 --- a/google/cloud/spanner_v1/types/keys.py +++ b/google/cloud/spanner_v1/types/keys.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/types/mutation.py b/google/cloud/spanner_v1/types/mutation.py index 9e17878f81..8389910fc0 100644 --- a/google/cloud/spanner_v1/types/mutation.py +++ b/google/cloud/spanner_v1/types/mutation.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/spanner_v1/types/query_plan.py b/google/cloud/spanner_v1/types/query_plan.py index ca594473f8..d361911f1d 100644 --- a/google/cloud/spanner_v1/types/query_plan.py +++ b/google/cloud/spanner_v1/types/query_plan.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/types/result_set.py b/google/cloud/spanner_v1/types/result_set.py index 9e7529124c..68119316d2 100644 --- a/google/cloud/spanner_v1/types/result_set.py +++ b/google/cloud/spanner_v1/types/result_set.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -60,16 +60,14 @@ class ResultSet(proto.Message): rows modified, unless executed using the [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - Other fields may or may not be populated, based on the + Other fields might or might not be populated, based on the [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token will be included if the - read-write transaction is on a multiplexed session. The - precommit token with the highest sequence number from this - transaction attempt should be passed to the - [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. This feature is not yet supported and will - result in an UNIMPLEMENTED error. + Optional. A precommit token is included if the read-write + transaction is on a multiplexed session. Pass the precommit + token with the highest sequence number from this transaction + attempt to the [Commit][google.spanner.v1.Spanner.Commit] + request for this transaction. """ metadata: "ResultSetMetadata" = proto.Field( @@ -115,14 +113,14 @@ class PartialResultSet(proto.Message): Most values are encoded based on type as described [here][google.spanner.v1.TypeCode]. - It is possible that the last value in values is "chunked", + It's possible that the last value in values is "chunked", meaning that the rest of the value is sent in subsequent ``PartialResultSet``\ (s). This is denoted by the [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] field. Two or more chunked values can be merged to form a complete value as follows: - - ``bool/number/null``: cannot be chunked + - ``bool/number/null``: can't be chunked - ``string``: concatenate the strings - ``list``: concatenate the lists. If the last element in a list is a ``string``, ``list``, or ``object``, merge it @@ -136,28 +134,28 @@ class PartialResultSet(proto.Message): :: - # Strings are concatenated. + Strings are concatenated. "foo", "bar" => "foobar" - # Lists of non-strings are concatenated. + Lists of non-strings are concatenated. [2, 3], [4] => [2, 3, 4] - # Lists are concatenated, but the last and first elements are merged - # because they are strings. + Lists are concatenated, but the last and first elements are merged + because they are strings. ["a", "b"], ["c", "d"] => ["a", "bc", "d"] - # Lists are concatenated, but the last and first elements are merged - # because they are lists. 
Recursively, the last and first elements - # of the inner lists are merged because they are strings. + Lists are concatenated, but the last and first elements are merged + because they are lists. Recursively, the last and first elements + of the inner lists are merged because they are strings. ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"] - # Non-overlapping object fields are combined. + Non-overlapping object fields are combined. {"a": "1"}, {"b": "2"} => {"a": "1", "b": 2"} - # Overlapping object fields are merged. + Overlapping object fields are merged. {"a": "1"}, {"a": "2"} => {"a": "12"} - # Examples of merging objects containing lists of strings. + Examples of merging objects containing lists of strings. {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]} For a more complete example, suppose a streaming SQL query @@ -176,7 +174,6 @@ class PartialResultSet(proto.Message): { "values": ["orl"] "chunked_value": true - "resume_token": "Bqp2..." } { "values": ["d"] @@ -186,6 +183,13 @@ class PartialResultSet(proto.Message): This sequence of ``PartialResultSet``\ s encodes two rows, one containing the field value ``"Hello"``, and a second containing the field value ``"World" = "W" + "orl" + "d"``. + + Not all ``PartialResultSet``\ s contain a ``resume_token``. + Execution can only be resumed from a previously yielded + ``resume_token``. For the above sequence of + ``PartialResultSet``\ s, resuming the query with + ``"resume_token": "Af65..."`` yields results from the + ``PartialResultSet`` with value "orl". chunked_value (bool): If true, then the final value in [values][google.spanner.v1.PartialResultSet.values] is @@ -205,16 +209,20 @@ class PartialResultSet(proto.Message): by setting [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] and are sent only once with the last response in the stream. - This field will also be present in the last response for DML + This field is also present in the last response for DML statements. precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token will be included if the - read-write transaction is on a multiplexed session. The + Optional. A precommit token is included if the read-write + transaction has multiplexed sessions enabled. Pass the precommit token with the highest sequence number from this - transaction attempt should be passed to the + transaction attempt to the [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. This feature is not yet supported and will - result in an UNIMPLEMENTED error. + transaction. + last (bool): + Optional. Indicates whether this is the last + ``PartialResultSet`` in the stream. The server might + optionally set this field. Clients shouldn't rely on this + field being set in all cases. """ metadata: "ResultSetMetadata" = proto.Field( @@ -245,6 +253,10 @@ class PartialResultSet(proto.Message): number=8, message=gs_transaction.MultiplexedSessionPrecommitToken, ) + last: bool = proto.Field( + proto.BOOL, + number=9, + ) class ResultSetMetadata(proto.Message): @@ -335,7 +347,7 @@ class ResultSetStats(proto.Message): This field is a member of `oneof`_ ``row_count``. row_count_lower_bound (int): - Partitioned DML does not offer exactly-once + Partitioned DML doesn't offer exactly-once semantics, so it returns a lower bound of the rows modified. 
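The chunked-value merge rules quoted in the `PartialResultSet` docstring above are mechanical enough to execute directly. The following is an illustrative sketch of those documented semantics; `merge_chunked` is a hypothetical helper for this example only, not the client library's actual streaming merge code:

```python
# Illustrative implementation of the chunked-value merge rules quoted
# in the PartialResultSet docs above. Treat this as a sketch of the
# documented semantics, not the library's implementation.
def merge_chunked(prev, curr):
    """Merge the last value of one chunk with the first of the next."""
    if isinstance(prev, str) and isinstance(curr, str):
        return prev + curr                      # "W" + "orl" => "Worl"
    if isinstance(prev, list) and isinstance(curr, list):
        mergeable = (str, list, dict)
        if (prev and curr
                and isinstance(prev[-1], mergeable)
                and type(prev[-1]) is type(curr[0])):
            # Concatenate, recursively merging the boundary elements.
            return prev[:-1] + [merge_chunked(prev[-1], curr[0])] + curr[1:]
        return prev + curr                      # [2, 3] + [4] => [2, 3, 4]
    if isinstance(prev, dict) and isinstance(curr, dict):
        merged = dict(prev)
        for key, value in curr.items():
            # Overlapping fields are merged; non-overlapping are combined.
            merged[key] = (
                merge_chunked(merged[key], value) if key in merged else value
            )
        return merged
    raise TypeError("bool/number/null values can't be chunked")


assert merge_chunked("foo", "bar") == "foobar"
assert merge_chunked(["a", "b"], ["c", "d"]) == ["a", "bc", "d"]
assert merge_chunked(["a", ["b", "c"]], [["d"], "e"]) == ["a", ["b", "cd"], "e"]
assert merge_chunked({"a": ["1"]}, {"a": ["2"]}) == {"a": ["12"]}
```

The assertions reproduce the worked examples from the docstring, including the recursive case where the boundary elements of nested lists are themselves strings.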
diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py index 978362d357..67f1093448 100644 --- a/google/cloud/spanner_v1/types/spanner.py +++ b/google/cloud/spanner_v1/types/spanner.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/types/transaction.py b/google/cloud/spanner_v1/types/transaction.py index 0a25f1ea15..d088fa6570 100644 --- a/google/cloud/spanner_v1/types/transaction.py +++ b/google/cloud/spanner_v1/types/transaction.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/spanner_v1/types/type.py b/google/cloud/spanner_v1/types/type.py index e47c1077bb..8996b67388 100644 --- a/google/cloud/spanner_v1/types/type.py +++ b/google/cloud/spanner_v1/types/type.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/noxfile.py b/noxfile.py index cb683afd7e..be3a05c455 100644 --- a/noxfile.py +++ b/noxfile.py @@ -51,6 +51,9 @@ "pytest-cov", "pytest-asyncio", ] +MOCK_SERVER_ADDITIONAL_DEPENDENCIES = [ + "google-cloud-testutils", +] UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] @@ -178,21 +181,6 @@ def install_unittest_dependencies(session, *constraints): # XXX: Dump installed versions to debug OT issue session.run("pip", "list") - # Run py.test against the unit tests with OpenTelemetry. - session.run( - "py.test", - "--quiet", - "--cov=google.cloud.spanner", - "--cov=google.cloud", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) @nox.parametrize( @@ -242,8 +230,11 @@ def mockserver(session): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - # install_unittest_dependencies(session, "-c", constraints_path) - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + standard_deps = ( + UNIT_TEST_STANDARD_DEPENDENCIES + + UNIT_TEST_DEPENDENCIES + + MOCK_SERVER_ADDITIONAL_DEPENDENCIES + ) session.install(*standard_deps, "-c", constraints_path) session.install("-e", ".", "-c", constraints_path) @@ -323,9 +314,12 @@ def system(session, protobuf_implementation, database_dialect): session.skip( "Credentials or emulator host must be set via environment variable" ) - # If POSTGRESQL tests and Emulator, skip the tests - if os.environ.get("SPANNER_EMULATOR_HOST") and database_dialect == "POSTGRESQL": - session.skip("Postgresql is not supported by Emulator yet.") + if not ( + os.environ.get("SPANNER_EMULATOR_HOST") or protobuf_implementation == "python" + ): + session.skip( + "Only run system tests on real Spanner with one protobuf implementation to speed up the build" + ) # Install pyopenssl for mTLS testing. 
if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": @@ -359,7 +353,7 @@ def system(session, protobuf_implementation, database_dialect): "SKIP_BACKUP_TESTS": "true", }, ) - if system_test_folder_exists: + elif system_test_folder_exists: session.run( "py.test", "--quiet", @@ -561,30 +555,32 @@ def prerelease_deps(session, protobuf_implementation, database_dialect): system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - "SPANNER_DATABASE_DIALECT": database_dialect, - "SKIP_BACKUP_TESTS": "true", - }, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - "SPANNER_DATABASE_DIALECT": database_dialect, - "SKIP_BACKUP_TESTS": "true", - }, - ) + # Only run system tests for one protobuf implementation on real Spanner to speed up the build. + if os.environ.get("SPANNER_EMULATOR_HOST") or protobuf_implementation == "python": + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + "SPANNER_DATABASE_DIALECT": database_dialect, + "SKIP_BACKUP_TESTS": "true", + }, + ) + elif os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + "SPANNER_DATABASE_DIALECT": database_dialect, + "SKIP_BACKUP_TESTS": "true", + }, + ) diff --git a/owlbot.py b/owlbot.py index 3027a1a8ba..3f72a35599 100644 --- a/owlbot.py +++ b/owlbot.py @@ -80,6 +80,111 @@ def get_staging_dirs( shutil.rmtree("samples/generated_samples", ignore_errors=True) clean_up_generated_samples = False + # Customization for MetricsInterceptor + + assert 6 == s.replace( + [ + library / "google/cloud/spanner_v1/services/spanner/transports/*.py", + library / "google/cloud/spanner_v1/services/spanner/client.py", + ], + """from google.cloud.spanner_v1.types import transaction""", + """from google.cloud.spanner_v1.types import transaction +from google.cloud.spanner_v1.metrics.metrics_interceptor import MetricsInterceptor""", + ) + + assert 1 == s.replace( + library / "google/cloud/spanner_v1/services/spanner/transports/*.py", + """api_audience: Optional\[str\] = None, + \*\*kwargs, + \) -> None: + \"\"\"Instantiate the transport.""", +"""api_audience: Optional[str] = None, + metrics_interceptor: Optional[MetricsInterceptor] = None, + **kwargs, + ) -> None: + \"\"\"Instantiate the transport.""" + ) + + assert 4 == s.replace( + library / "google/cloud/spanner_v1/services/spanner/transports/*.py", + """api_audience: Optional\[str\] = None, + \) -> None: + \"\"\"Instantiate the transport.""", +"""api_audience: Optional[str] = None, + metrics_interceptor: Optional[MetricsInterceptor] = None, + ) -> None: + \"\"\"Instantiate the transport.""" + ) 
+ + assert 1 == s.replace( + library / "google/cloud/spanner_v1/services/spanner/transports/grpc.py", + """\)\n\n self._interceptor = _LoggingClientInterceptor\(\)""", + """) + + # Wrap the gRPC channel with the metric interceptor + if metrics_interceptor is not None: + self._metrics_interceptor = metrics_interceptor + self._grpc_channel = grpc.intercept_channel( + self._grpc_channel, metrics_interceptor + ) + + self._interceptor = _LoggingClientInterceptor()""" + ) + + assert 1 == s.replace( + library / "google/cloud/spanner_v1/services/spanner/transports/grpc.py", + """self._stubs: Dict\[str, Callable\] = \{\}\n\n if api_mtls_endpoint:""", + """self._stubs: Dict[str, Callable] = {} + self._metrics_interceptor = None + + if api_mtls_endpoint:""" + ) + + assert 1 == s.replace( + library / "google/cloud/spanner_v1/services/spanner/client.py", + """# initialize with the provided callable or the passed in class + self._transport = transport_init\( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + \)""", + """# initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + metrics_interceptor=MetricsInterceptor(), + )""", + ) + + assert 12 == s.replace( + library / "tests/unit/gapic/spanner_v1/test_spanner.py", + """api_audience=None,\n(\s+)\)""", + """api_audience=None, + metrics_interceptor=mock.ANY, + )""" + ) + + assert 1 == s.replace( + library / "tests/unit/gapic/spanner_v1/test_spanner.py", + """api_audience="https://language.googleapis.com"\n(\s+)\)""", + """api_audience="https://language.googleapis.com", + metrics_interceptor=mock.ANY, + )""" + ) + s.move( library, excludes=[ @@ -96,11 +201,6 @@ def get_staging_dirs( for library in get_staging_dirs( spanner_admin_instance_default_version, "spanner_admin_instance" ): - s.replace( - library / "google/cloud/spanner_admin_instance_v*/__init__.py", - "from google.cloud.spanner_admin_instance import gapic_version as package_version", - f"from google.cloud.spanner_admin_instance_{library.name} import gapic_version as package_version", - ) s.move( library, excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst", "noxfile.py", "**/gapic_version.py", "testing/constraints-3.7.txt",], @@ -109,11 +209,6 @@ def get_staging_dirs( for library in get_staging_dirs( spanner_admin_database_default_version, "spanner_admin_database" ): - s.replace( - library / "google/cloud/spanner_admin_database_v*/__init__.py", - "from google.cloud.spanner_admin_database import gapic_version as package_version", - f"from google.cloud.spanner_admin_database_{library.name} import gapic_version as package_version", - ) s.move( library, excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst", "noxfile.py", "**/gapic_version.py", "testing/constraints-3.7.txt",], @@ -139,6 +234,7 @@ def get_staging_dirs( "README.rst", 
".github/release-please.yml", ".kokoro/test-samples-impl.sh", + ".kokoro/presubmit/presubmit.cfg", ], ) diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index 9bbabdab00..609e70a8c2 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.54.0" + "version": "3.55.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 765c9d46ed..c78d74fd41 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.54.0" + "version": "3.55.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index c9c643d8b2..22a0a46fb4 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.54.0" + "version": "3.55.0" }, "snippets": [ { diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py index 9ecd231125..ff6fcfe598 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py index 43c01f8c9f..3819bbe986 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py index 32b6a49424..d885947bb5 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py index 8095668300..a571e058c9 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py index fab8784592..2ad8881f54 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py index e9a386c6bf..efdcc2457e 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py index e4ae46f99c..60d4b50c3b 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py index aed56f38ec..02b9d1f0e7 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py index ed33381135..47399a8d40 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py index eefa7b1b76..6f112cd8a7 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py index 8e2f065e08..ab10785105 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py index 27aa572802..591d45cb10 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py index 47ee67b992..720417ba65 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py index 0285226164..736dc56a23 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py index 761e554b70..15f279b72d 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py index 6c288a5218..f218cabd83 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py index dfa618063f..58b93a119a 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py index 98d8375bfe..5a37eec975 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py index c061c92be2..4006cac333 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py index 8bcc701ffd..16cffcd78d 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py index d683763f11..fd8621c27b 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py index d0b3144c54..8e84b21f78 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py index 2290e41605..495b557a55 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py index 03c230f0a5..ab729bb9e3 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py index be670085c5..d5d75de78b 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py index 373cefddf8..75e0b48b1b 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py index 006ccfd03d..a56ec9f80e 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py index 3b43e2a421..6383e1b247 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py index b6b8517ff6..25ac53891a 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py index 64c4872f35..89cf82d278 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py index b5108233aa..140e519e07 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py index 9560a10109..9f04036f74 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py index 83d3e9da52..3bc614b232 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py index 1000a4d331..3d4dc965a9 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py index c932837b20..46ec91ce89 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py index 7954a66b66..d39e4759dd 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py index 1309518b23..586dfa56f1 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py index 12124cf524..e6ef221af6 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py index eb8f2a3f80..384c063c61 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py index f2307a1373..a327a8ae13 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py index 471292596d..edade4c950 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py index 6966e294af..28a6746f4a 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py index feb2a5ca93..0e6ea91cb3 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py index 16b7587251..3fd0316dc1 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py index aea59b4c92..95fa2a63f6 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py index 767ae35969..de17dfc86e 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py index 43e2d7ff79..4ef64a0673 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py index aac39bb124..9dbb0148dc 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py index cfc427c768..d5588c3036 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py index 940760d957..ad98e2da9c 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py index 37189cc03b..73297524b9 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py index fe15e7ce86..62ed40bc84 100644 --- a/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py index 4eb7c7aa05..74bd640044 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py index 824b001bbb..c3f266e4c4 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py index 8674445ca1..c5b7616534 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py index 65d4f9f7d3..a22765f53f 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py index dd29783b41..5b5f2e0e26 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py index 355d17496b..f43c5016b5 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py index 91ff61bb4f..262da709aa 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py index 9cdb724363..df83d9e424 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py index b42ccf67c7..9a9c4d7ca1 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py index 4609f23b3c..78ca44d6c2 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py index ee3154a818..72249ef6c7 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py index 3303f219fe..613ac6c070 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py index 73fdfdf2f4..a0b620ae4f 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py index 0afa94e008..cc0d725a03 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py index 32de7eab8b..059eb2a078 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py index aeeb5b5106..9adfb51c2e 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py index fbdcf3ff1f..16e9d3c3c8 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py index d59e5a4cc7..8e84abcf6e 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py index 545112fe50..d617cbb382 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py index 25e9221772..4a246a5bf3 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py index c521261e57..a0580fef7c 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py index ee1d6c10bc..89213b3a2e 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py index 0f405efa17..651b2f88ae 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py index dc94c90e45..a0f120277a 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py index a526600c46..9dedb973f1 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py index 47d40cc011..b2a7549b29 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py index b241b83957..56adc152fe 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py index 7e23ad5fdf..1e65552fc1 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py index c499be7e7d..abe1a1affa 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py index 6fd4ce9b04..f344baff11 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py index 6530706620..ce62120492 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py index 32d1c4f5b1..4621200e0c 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py index b575a3ebec..2443f2127d 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py index 87f95719d9..ba6401602f 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py index 94f406fe86..aa0e05dde3 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py index 0940a69558..80b2a4dd21 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py index 27fc605adb..ecabbf5191 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py index 1705623ab6..f7ea78401c 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py index 7313ce4dd1..1d184f6c58 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py index cc84025f61..42d3c484f8 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py index 8c03a71cb6..56cd2760a1 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py index 8c8bd97801..2340e701e1 100644 --- a/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py b/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py index 1bb7980b78..49e64b4ab8 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py index 03cf8cb51f..ade1da3661 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py index ffd543c558..d1565657e8 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py index 4c2a61570e..9b6621def9 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py b/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py index d83678021f..efdd161715 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py index 7b46b6607a..764dab8aa2 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py b/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py index d58a68ebf7..f61c297d38 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py index 7591f2ee3a..a945bd2234 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py b/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py index 0aa41bfd0f..8cddc00c66 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py index f3eb09c5fd..b9de2d34e0 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py b/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py index daa5434346..9fed1ddca6 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py index bf710daa12..1f2a17e2d1 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py b/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py index 5652a454af..8313fd66a0 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py index 368d9151fc..dd4696b6b2 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py b/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py index 5e90cf9dbf..a12b20f3e9 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py index 1c34213f81..761d0ca251 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py b/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py index 66620d7c7f..86b8eb910e 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py index 5cb5e99785..dc7dba43b8 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py b/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py index 64d5c6ebcb..d2e50f9891 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py index 80b6574586..36d6436b04 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py b/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py index 1a683d2957..95aa4bf818 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py index 691cb51b69..a9533fed0d 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py b/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py index 35071eead0..200fb2f6a2 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py index fe881a1152..d486a3590c 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py b/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py index 7283111d8c..99055ade8b 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py index 981d2bc900..0ca01ac423 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_read_async.py b/samples/generated_samples/spanner_v1_generated_spanner_read_async.py index d067e6c5da..e555865245 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_read_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_read_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py index b87735f096..8f9ee621f3 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py b/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py index fbb8495acc..f99a1b8dd8 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py index 0a3bef9fb9..00b23b21fc 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py b/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py index 65bd926ab4..f79b9a96a1 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py index b7165fea6e..f81ed34b33 100644 --- a/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py +++ b/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index 4b4d7b5a2e..f55e456bec 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -2510,6 +2510,36 @@ def update_venues(transaction): # [END spanner_set_transaction_tag] +def set_transaction_timeout(instance_id, database_id): + """Executes a transaction with a transaction timeout.""" + # [START spanner_transaction_timeout] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def read_then_write(transaction): + # Read records. + results = transaction.execute_sql( + "SELECT SingerId, FirstName, LastName FROM Singers ORDER BY LastName, FirstName" + ) + for result in results: + print("SingerId: {}, FirstName: {}, LastName: {}".format(*result)) + + # Insert a record. 
+ row_ct = transaction.execute_update( + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (100, 'George', 'Washington')" + ) + print("{} record(s) inserted.".format(row_ct)) + + # Configure the transaction timeout to be 60 seconds. + database.run_in_transaction(read_then_write, timeout_secs=60) + + # [END spanner_transaction_timeout] + + def set_request_tag(instance_id, database_id): """Executes a snapshot read with a request tag.""" # [START spanner_set_request_tag] @@ -3272,6 +3302,7 @@ def update_instance_default_backup_schedule_type(instance_id): print("Updated instance {} to have default backup schedules".format(instance_id)) + # [END spanner_update_instance_default_backup_schedule_type] @@ -3617,6 +3648,9 @@ def add_split_points(instance_id, database_id): subparsers.add_parser("add_column", help=add_column.__doc__) subparsers.add_parser("update_data", help=update_data.__doc__) subparsers.add_parser("set_max_commit_delay", help=set_max_commit_delay.__doc__) + subparsers.add_parser( + "set_transaction_timeout", help=set_transaction_timeout.__doc__ + ) subparsers.add_parser( "query_data_with_new_column", help=query_data_with_new_column.__doc__ ) @@ -3783,6 +3817,8 @@ def add_split_points(instance_id, database_id): update_data(args.instance_id, args.database_id) elif args.command == "set_max_commit_delay": set_max_commit_delay(args.instance_id, args.database_id) + elif args.command == "set_transaction_timeout": + set_transaction_timeout(args.instance_id, args.database_id) elif args.command == "query_data_with_new_column": query_data_with_new_column(args.instance_id, args.database_id) elif args.command == "read_write_transaction": diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index eb61e8bd1f..3fcd16755c 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -855,6 +855,13 @@ def test_set_transaction_tag(capsys, instance_id, sample_database): assert "New venue inserted." in out + +@pytest.mark.dependency(depends=["insert_datatypes_data"]) +def test_set_transaction_timeout(capsys, instance_id, sample_database): + snippets.set_transaction_timeout(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "1 record(s) inserted." in out + + @pytest.mark.dependency(depends=["insert_data"]) def test_set_request_tag(capsys, instance_id, sample_database): snippets.set_request_tag(instance_id, sample_database.database_id) diff --git a/scripts/fixup_spanner_admin_database_v1_keywords.py b/scripts/fixup_spanner_admin_database_v1_keywords.py index bb10888f92..c4ab94b57c 100644 --- a/scripts/fixup_spanner_admin_database_v1_keywords.py +++ b/scripts/fixup_spanner_admin_database_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
@@ -64,7 +64,7 @@ class spanner_admin_databaseCallTransformer(cst.CSTTransformer): 'update_backup': ('backup', 'update_mask', ), 'update_backup_schedule': ('backup_schedule', 'update_mask', ), 'update_database': ('database', 'update_mask', ), - 'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', ), + 'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', 'throughput_mode', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/scripts/fixup_spanner_admin_instance_v1_keywords.py b/scripts/fixup_spanner_admin_instance_v1_keywords.py index 3b5fa8afb6..8200af5099 100644 --- a/scripts/fixup_spanner_admin_instance_v1_keywords.py +++ b/scripts/fixup_spanner_admin_instance_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py index 91d94cbef8..c7f41be11e 100644 --- a/scripts/fixup_spanner_v1_keywords.py +++ b/scripts/fixup_spanner_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 0523500895..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt index ad3f0fa58e..2010e549cc 100644 --- a/testing/constraints-3.13.txt +++ b/testing/constraints-3.13.txt @@ -1,7 +1,12 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/tests/__init__.py b/tests/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
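The set_transaction_timeout snippet added to samples/samples/snippets.py above passes timeout_secs straight through database.run_in_transaction, which bounds how long the helper keeps retrying an aborted transaction before giving up. A minimal standalone sketch of the same call, assuming placeholder instance and database names that are not part of this change:

from google.cloud import spanner

client = spanner.Client()
database = client.instance("my-instance").database("my-database")

def delete_singer(transaction):
    # Work done here is retried as a unit if the transaction aborts.
    transaction.execute_update("DELETE FROM Singers WHERE SingerId = 100")

# Stops retrying and raises if the transaction has not committed within 30 seconds.
database.run_in_transaction(delete_singer, timeout_secs=30)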
diff --git a/tests/mockserver_tests/mock_server_test_base.py b/tests/mockserver_tests/mock_server_test_base.py index b332c88d7c..7b4538d601 100644 --- a/tests/mockserver_tests/mock_server_test_base.py +++ b/tests/mockserver_tests/mock_server_test_base.py @@ -153,6 +153,7 @@ def setup_class(cls): def teardown_class(cls): if MockServerTestBase.server is not None: MockServerTestBase.server.stop(grace=None) + Client.NTH_CLIENT.reset() MockServerTestBase.server = None def setup_method(self, *args, **kwargs): @@ -186,6 +187,8 @@ def instance(self) -> Instance: def database(self) -> Database: if self._database is None: self._database = self.instance.database( - "test-database", pool=FixedSizePool(size=10) + "test-database", + pool=FixedSizePool(size=10), + enable_interceptors_in_tests=True, ) return self._database diff --git a/tests/mockserver_tests/test_aborted_transaction.py b/tests/mockserver_tests/test_aborted_transaction.py index 93eb42fe39..6a61dd4c73 100644 --- a/tests/mockserver_tests/test_aborted_transaction.py +++ b/tests/mockserver_tests/test_aborted_transaction.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import random from google.cloud.spanner_v1 import ( BatchCreateSessionsRequest, @@ -29,6 +30,12 @@ add_update_count, add_single_result, ) +from google.api_core import exceptions +from test_utils import retry + +retry_maybe_aborted_txn = retry.RetryErrors( + exceptions.Aborted, max_tries=5, delay=0, backoff=1 +) class TestAbortedTransaction(MockServerTestBase): @@ -119,6 +126,18 @@ def test_batch_commit_aborted(self): # The transaction is aborted and retried. self.assertTrue(isinstance(requests[2], CommitRequest)) + @retry_maybe_aborted_txn + def test_retry_helper(self): + # Randomly add an Aborted error for the Commit method on the mock server. + if random.random() < 0.5: + add_error(SpannerServicer.Commit.__name__, aborted_status()) + session = self.database.session() + session.create() + transaction = session.transaction() + transaction.begin() + transaction.insert("my_table", ["col1", "col2"], [[1, "One"]]) + transaction.commit() + def _insert_mutations(transaction: Transaction): transaction.insert("my_table", ["col1", "col2"], ["value1", "value2"]) diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py index 3706552d31..9db84b117f 100644 --- a/tests/mockserver_tests/test_basics.py +++ b/tests/mockserver_tests/test_basics.py @@ -93,6 +93,23 @@ def test_dbapi_partitioned_dml(self): TransactionOptions(dict(partitioned_dml={})), begin_request.options ) + def test_batch_create_sessions_unavailable(self): + add_select1_result() + add_error(SpannerServicer.BatchCreateSessions.__name__, unavailable_status()) + with self.database.snapshot() as snapshot: + results = snapshot.execute_sql("select 1") + result_list = [] + for row in results: + result_list.append(row) + self.assertEqual(1, row[0]) + self.assertEqual(1, len(result_list)) + requests = self.spanner_service.requests + self.assertEqual(3, len(requests), msg=requests) + # The BatchCreateSessions call should be retried.
+ self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) + def test_execute_streaming_sql_unavailable(self): add_select1_result() # Add an UNAVAILABLE error that is returned the first time the diff --git a/tests/mockserver_tests/test_request_id_header.py b/tests/mockserver_tests/test_request_id_header.py new file mode 100644 index 0000000000..6503d179d5 --- /dev/null +++ b/tests/mockserver_tests/test_request_id_header.py @@ -0,0 +1,325 @@ +# Copyright 2025 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import random +import threading + +from google.cloud.spanner_v1 import ( + BatchCreateSessionsRequest, + BeginTransactionRequest, + ExecuteSqlRequest, +) +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID +from google.cloud.spanner_v1.testing.mock_spanner import SpannerServicer +from tests.mockserver_tests.mock_server_test_base import ( + MockServerTestBase, + add_select1_result, + aborted_status, + add_error, + unavailable_status, +) + + +class TestRequestIDHeader(MockServerTestBase): + def tearDown(self): + self.database._x_goog_request_id_interceptor.reset() + + def test_snapshot_execute_sql(self): + add_select1_result() + if not getattr(self.database, "_interceptors", None): + self.database._interceptors = MockServerTestBase._interceptors + with self.database.snapshot() as snapshot: + results = snapshot.execute_sql("select 1") + result_list = [] + for row in results: + result_list.append(row) + self.assertEqual(1, row[0]) + self.assertEqual(1, len(result_list)) + + requests = self.spanner_service.requests + self.assertEqual(2, len(requests), msg=requests) + self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], ExecuteSqlRequest)) + + NTH_CLIENT = self.database._nth_client_id + CHANNEL_ID = self.database._channel_id + # Now ensure monotonicity of the received request-id segments. + got_stream_segments, got_unary_segments = self.canonicalize_request_id_headers() + want_unary_segments = [ + ( + "/google.spanner.v1.Spanner/BatchCreateSessions", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 1, 1), + ) + ] + want_stream_segments = [ + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 2, 1), + ) + ] + + assert got_unary_segments == want_unary_segments + assert got_stream_segments == want_stream_segments + + def test_snapshot_read_concurrent(self): + add_select1_result() + db = self.database + # Trigger BatchCreateSessions first. + with db.snapshot() as snapshot: + rows = snapshot.execute_sql("select 1") + for row in rows: + _ = row + + # The other requests can then proceed. 
+ def select1(): + with db.snapshot() as snapshot: + rows = snapshot.execute_sql("select 1") + res_list = [] + for row in rows: + self.assertEqual(1, row[0]) + res_list.append(row) + self.assertEqual(1, len(res_list)) + + n = 10 + threads = [] + for i in range(n): + th = threading.Thread(target=select1, name=f"snapshot-select1-{i}") + threads.append(th) + th.start() + + random.shuffle(threads) + for thread in threads: + thread.join() + + requests = self.spanner_service.requests + # We expect 2 + n requests, because: + # 1. The initial query triggers one BatchCreateSessions call + one ExecuteStreamingSql call. + # 2. Each following query triggers one ExecuteStreamingSql call. + self.assertEqual(2 + n, len(requests), msg=requests) + + client_id = db._nth_client_id + channel_id = db._channel_id + got_stream_segments, got_unary_segments = self.canonicalize_request_id_headers() + + want_unary_segments = [ + ( + "/google.spanner.v1.Spanner/BatchCreateSessions", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 1, 1), + ), + ] + assert got_unary_segments == want_unary_segments + + want_stream_segments = [ + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 2, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 3, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 4, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 5, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 6, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 7, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 8, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 9, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 10, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 11, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, client_id, channel_id, 12, 1), + ), + ] + assert got_stream_segments == want_stream_segments + + def test_database_run_in_transaction_retries_on_abort(self): + counters = dict(aborted=0) + want_failed_attempts = 2 + + def select_in_txn(txn): + results = txn.execute_sql("select 1") + for row in results: + _ = row + + if counters["aborted"] < want_failed_attempts: + counters["aborted"] += 1 + add_error(SpannerServicer.Commit.__name__, aborted_status()) + + add_select1_result() + if not getattr(self.database, "_interceptors", None): + self.database._interceptors = MockServerTestBase._interceptors + + self.database.run_in_transaction(select_in_txn) + + def test_database_execute_partitioned_dml_request_id(self): + add_select1_result() + if not getattr(self.database, "_interceptors", None): + self.database._interceptors = MockServerTestBase._interceptors + _ = self.database.execute_partitioned_dml("select 1") + + requests = self.spanner_service.requests + self.assertEqual(3, len(requests), msg=requests) + self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], BeginTransactionRequest)) + 
self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) + + # Now ensure monotonicity of the received request-id segments. + got_stream_segments, got_unary_segments = self.canonicalize_request_id_headers() + NTH_CLIENT = self.database._nth_client_id + CHANNEL_ID = self.database._channel_id + want_unary_segments = [ + ( + "/google.spanner.v1.Spanner/BatchCreateSessions", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 1, 1), + ), + ( + "/google.spanner.v1.Spanner/BeginTransaction", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 2, 1), + ), + ] + want_stream_segments = [ + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 3, 1), + ) + ] + + assert got_unary_segments == want_unary_segments + assert got_stream_segments == want_stream_segments + + def test_unary_retryable_error(self): + add_select1_result() + add_error(SpannerServicer.BatchCreateSessions.__name__, unavailable_status()) + + if not getattr(self.database, "_interceptors", None): + self.database._interceptors = MockServerTestBase._interceptors + with self.database.snapshot() as snapshot: + results = snapshot.execute_sql("select 1") + result_list = [] + for row in results: + result_list.append(row) + self.assertEqual(1, row[0]) + self.assertEqual(1, len(result_list)) + + requests = self.spanner_service.requests + self.assertEqual(3, len(requests), msg=requests) + self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) + + NTH_CLIENT = self.database._nth_client_id + CHANNEL_ID = self.database._channel_id + # Now ensure monotonicity of the received request-id segments. + got_stream_segments, got_unary_segments = self.canonicalize_request_id_headers() + + want_stream_segments = [ + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 2, 1), + ) + ] + assert got_stream_segments == want_stream_segments + + want_unary_segments = [ + ( + "/google.spanner.v1.Spanner/BatchCreateSessions", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 1, 1), + ), + ( + "/google.spanner.v1.Spanner/BatchCreateSessions", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 1, 2), + ), + ] + # TODO(@odeke-em): enable this test in the next iteration + # when we've figured out unary retries with UNAVAILABLE. + # See https://github.com/googleapis/python-spanner/issues/1379. 
+ if True: + print( + "TODO(@odeke-em): enable request_id checking when we figure out propagation for unary requests" + ) + else: + assert got_unary_segments == want_unary_segments + + def test_streaming_retryable_error(self): + add_select1_result() + add_error(SpannerServicer.ExecuteStreamingSql.__name__, unavailable_status()) + + if not getattr(self.database, "_interceptors", None): + self.database._interceptors = MockServerTestBase._interceptors + with self.database.snapshot() as snapshot: + results = snapshot.execute_sql("select 1") + result_list = [] + for row in results: + result_list.append(row) + self.assertEqual(1, row[0]) + self.assertEqual(1, len(result_list)) + + requests = self.spanner_service.requests + self.assertEqual(3, len(requests), msg=requests) + self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], ExecuteSqlRequest)) + self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) + + NTH_CLIENT = self.database._nth_client_id + CHANNEL_ID = self.database._channel_id + # Now ensure monotonicity of the received request-id segments. + got_stream_segments, got_unary_segments = self.canonicalize_request_id_headers() + want_unary_segments = [ + ( + "/google.spanner.v1.Spanner/BatchCreateSessions", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 1, 1), + ), + ] + want_stream_segments = [ + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 2, 1), + ), + ( + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + (1, REQ_RAND_PROCESS_ID, NTH_CLIENT, CHANNEL_ID, 2, 2), + ), + ] + + assert got_unary_segments == want_unary_segments + assert got_stream_segments == want_stream_segments + + def canonicalize_request_id_headers(self): + src = self.database._x_goog_request_id_interceptor + return src._stream_req_segments, src._unary_req_segments diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py index f37aefc2e5..1fc897b39c 100644 --- a/tests/system/_helpers.py +++ b/tests/system/_helpers.py @@ -74,8 +74,8 @@ retry_429_503 = retry.RetryErrors( exceptions.TooManyRequests, exceptions.ServiceUnavailable, 8 ) -retry_mabye_aborted_txn = retry.RetryErrors(exceptions.ServerError, exceptions.Aborted) -retry_mabye_conflict = retry.RetryErrors(exceptions.ServerError, exceptions.Conflict) +retry_maybe_aborted_txn = retry.RetryErrors(exceptions.Aborted) +retry_maybe_conflict = retry.RetryErrors(exceptions.Conflict) def _has_all_ddl(database): diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py index a98f100bcc..9a45051c77 100644 --- a/tests/system/test_dbapi.py +++ b/tests/system/test_dbapi.py @@ -763,12 +763,15 @@ def test_commit_abort_retry(self, dbapi_database): dbapi_database._method_abort_interceptor.set_method_to_abort( COMMIT_METHOD, self._conn ) - # called 2 times + # called (at least) 2 times self._conn.commit() dbapi_database._method_abort_interceptor.reset() - assert method_count_interceptor._counts[COMMIT_METHOD] == 2 - assert method_count_interceptor._counts[EXECUTE_BATCH_DML_METHOD] == 4 - assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] == 10 + # Verify the number of calls. + # We don't know the exact number of calls, as Spanner could also + # abort the transaction. 
+ assert method_count_interceptor._counts[COMMIT_METHOD] >= 2
+ assert method_count_interceptor._counts[EXECUTE_BATCH_DML_METHOD] >= 4
+ assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] >= 10
 self._cursor.execute("SELECT * FROM contacts")
 got_rows = self._cursor.fetchall()
@@ -829,10 +832,12 @@ def test_execute_sql_abort_retry_multiple_times(self, dbapi_database):
 self._cursor.fetchmany(2)
 dbapi_database._method_abort_interceptor.reset()
 self._conn.commit()
- # Check that all rpcs except commit should be called 3 times the original
- assert method_count_interceptor._counts[COMMIT_METHOD] == 1
- assert method_count_interceptor._counts[EXECUTE_BATCH_DML_METHOD] == 3
- assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] == 3
+ # Check that all RPCs except commit are called at least 3 times.
+ # We don't know the exact number of attempts, as the transaction could
+ # also be aborted by Spanner (and not only the test interceptor).
+ assert method_count_interceptor._counts[COMMIT_METHOD] >= 1
+ assert method_count_interceptor._counts[EXECUTE_BATCH_DML_METHOD] >= 3
+ assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] >= 3
 self._cursor.execute("SELECT * FROM contacts")
 got_rows = self._cursor.fetchall()
@@ -860,9 +865,9 @@ def test_execute_batch_dml_abort_retry(self, dbapi_database):
 self._cursor.execute("run batch")
 dbapi_database._method_abort_interceptor.reset()
 self._conn.commit()
- assert method_count_interceptor._counts[COMMIT_METHOD] == 1
- assert method_count_interceptor._counts[EXECUTE_BATCH_DML_METHOD] == 3
- assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] == 6
+ assert method_count_interceptor._counts[COMMIT_METHOD] >= 1
+ assert method_count_interceptor._counts[EXECUTE_BATCH_DML_METHOD] >= 3
+ assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] >= 6
 self._cursor.execute("SELECT * FROM contacts")
 got_rows = self._cursor.fetchall()
@@ -874,28 +879,28 @@ def test_multiple_aborts_in_transaction(self, dbapi_database):
 method_count_interceptor = dbapi_database._method_count_interceptor
 method_count_interceptor.reset()
- # called 3 times
+ # called at least 3 times
 self._insert_row(1)
 dbapi_database._method_abort_interceptor.set_method_to_abort(
 EXECUTE_STREAMING_SQL_METHOD, self._conn
 )
- # called 3 times
+ # called at least 3 times
 self._cursor.execute("SELECT * FROM contacts")
 dbapi_database._method_abort_interceptor.reset()
 self._cursor.fetchall()
- # called 2 times
+ # called at least 2 times
 self._insert_row(2)
- # called 2 times
+ # called at least 2 times
 self._cursor.execute("SELECT * FROM contacts")
 self._cursor.fetchone()
 dbapi_database._method_abort_interceptor.set_method_to_abort(
 COMMIT_METHOD, self._conn
 )
- # called 2 times
+ # called at least 2 times
 self._conn.commit()
 dbapi_database._method_abort_interceptor.reset()
- assert method_count_interceptor._counts[COMMIT_METHOD] == 2
- assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] == 10
+ assert method_count_interceptor._counts[COMMIT_METHOD] >= 2
+ assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] >= 10
 self._cursor.execute("SELECT * FROM contacts")
 got_rows = self._cursor.fetchall()
@@ -916,8 +921,8 @@ def test_consecutive_aborted_transactions(self, dbapi_database):
 )
 self._conn.commit()
 dbapi_database._method_abort_interceptor.reset()
- assert method_count_interceptor._counts[COMMIT_METHOD] == 2
- assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] == 6
+ assert
method_count_interceptor._counts[COMMIT_METHOD] >= 2 + assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] >= 6 method_count_interceptor = dbapi_database._method_count_interceptor method_count_interceptor.reset() @@ -930,8 +935,8 @@ def test_consecutive_aborted_transactions(self, dbapi_database): ) self._conn.commit() dbapi_database._method_abort_interceptor.reset() - assert method_count_interceptor._counts[COMMIT_METHOD] == 2 - assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] == 6 + assert method_count_interceptor._counts[COMMIT_METHOD] >= 2 + assert method_count_interceptor._counts[EXECUTE_STREAMING_SQL_METHOD] >= 6 self._cursor.execute("SELECT * FROM contacts") got_rows = self._cursor.fetchall() diff --git a/tests/system/test_observability_options.py b/tests/system/test_observability_options.py index d40b34f800..c3eabffe12 100644 --- a/tests/system/test_observability_options.py +++ b/tests/system/test_observability_options.py @@ -109,8 +109,23 @@ def test_propagation(enable_extended_tracing): len(from_inject_spans) >= 2 ) # "Expecting at least 2 spans from the injected trace exporter" gotNames = [span.name for span in from_inject_spans] + + # Check if multiplexed sessions are enabled + import os + + multiplexed_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS", "").lower() == "true" + ) + + # Determine expected session span name based on multiplexed sessions + expected_session_span_name = ( + "CloudSpanner.CreateMultiplexedSession" + if multiplexed_enabled + else "CloudSpanner.CreateSession" + ) + wantNames = [ - "CloudSpanner.CreateSession", + expected_session_span_name, "CloudSpanner.Snapshot.execute_sql", ] assert gotNames == wantNames @@ -392,6 +407,7 @@ def tx_update(txn): reason="Tracing requires OpenTelemetry", ) def test_database_partitioned_error(): + import os from opentelemetry.trace.status import StatusCode db, trace_exporter = create_db_trace_exporter() @@ -402,43 +418,84 @@ def test_database_partitioned_error(): pass got_statuses, got_events = finished_spans_statuses(trace_exporter) - # Check for the series of events - want_events = [ - ("Acquiring session", {"kind": "BurstyPool"}), - ("Waiting for a session to become available", {"kind": "BurstyPool"}), - ("No sessions available in pool. 
Creating session", {"kind": "BurstyPool"}), - ("Creating Session", {}), - ("Starting BeginTransaction", {}), - ( + + multiplexed_partitioned_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_PARTITIONED_OPS") == "true" + ) + + if multiplexed_partitioned_enabled: + expected_event_names = [ + "Creating Session", + "Using session", + "Starting BeginTransaction", + "Returning session", "exception", - { - "exception.type": "google.api_core.exceptions.InvalidArgument", - "exception.message": "400 Table not found: NonExistent [at 1:8]\nUPDATE NonExistent SET name = 'foo' WHERE id > 1\n ^", - "exception.stacktrace": "EPHEMERAL", - "exception.escaped": "False", - }, - ), - ( "exception", - { - "exception.type": "google.api_core.exceptions.InvalidArgument", - "exception.message": "400 Table not found: NonExistent [at 1:8]\nUPDATE NonExistent SET name = 'foo' WHERE id > 1\n ^", - "exception.stacktrace": "EPHEMERAL", - "exception.escaped": "False", - }, - ), - ] - assert got_events == want_events + ] + assert len(got_events) == len(expected_event_names) + for i, expected_name in enumerate(expected_event_names): + assert got_events[i][0] == expected_name + + assert got_events[1][1]["multiplexed"] is True + + assert got_events[3][1]["multiplexed"] is True + + for i in [4, 5]: + assert ( + got_events[i][1]["exception.type"] + == "google.api_core.exceptions.InvalidArgument" + ) + assert ( + "Table not found: NonExistent" in got_events[i][1]["exception.message"] + ) + else: + expected_event_names = [ + "Acquiring session", + "Waiting for a session to become available", + "No sessions available in pool. Creating session", + "Creating Session", + "Using session", + "Starting BeginTransaction", + "Returning session", + "exception", + "exception", + ] + + assert len(got_events) == len(expected_event_names) + for i, expected_name in enumerate(expected_event_names): + assert got_events[i][0] == expected_name + + assert got_events[0][1]["kind"] == "BurstyPool" + assert got_events[1][1]["kind"] == "BurstyPool" + assert got_events[2][1]["kind"] == "BurstyPool" + + assert got_events[4][1]["multiplexed"] is False + + assert got_events[6][1]["multiplexed"] is False + + for i in [7, 8]: + assert ( + got_events[i][1]["exception.type"] + == "google.api_core.exceptions.InvalidArgument" + ) + assert ( + "Table not found: NonExistent" in got_events[i][1]["exception.message"] + ) - # Check for the statues. codes = StatusCode + + expected_session_span_name = ( + "CloudSpanner.CreateMultiplexedSession" + if multiplexed_partitioned_enabled + else "CloudSpanner.CreateSession" + ) want_statuses = [ ( "CloudSpanner.Database.execute_partitioned_pdml", codes.ERROR, "InvalidArgument: 400 Table not found: NonExistent [at 1:8]\nUPDATE NonExistent SET name = 'foo' WHERE id > 1\n ^", ), - ("CloudSpanner.CreateSession", codes.OK, None), + (expected_session_span_name, codes.OK, None), ( "CloudSpanner.ExecuteStreamingSql", codes.ERROR, diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index 73b55b035d..26b389090f 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -33,6 +33,10 @@ from tests import _helpers as ot_helpers from . import _helpers from . 
import _sample_data +from google.cloud.spanner_v1.request_id_header import ( + REQ_RAND_PROCESS_ID, + parse_request_id, +) SOME_DATE = datetime.date(2011, 1, 17) @@ -426,6 +430,8 @@ def test_session_crud(sessions_database): def test_batch_insert_then_read(sessions_database, ot_exporter): + import os + db_name = sessions_database.name sd = _sample_data @@ -441,32 +447,85 @@ def test_batch_insert_then_read(sessions_database, ot_exporter): if ot_exporter is not None: span_list = ot_exporter.get_finished_spans() + sampling_req_id = parse_request_id( + span_list[0].attributes["x_goog_spanner_request_id"] + ) + nth_req0 = sampling_req_id[-2] + + db = sessions_database + + multiplexed_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS", "").lower() == "true" + ) + assert_span_attributes( ot_exporter, "CloudSpanner.GetSession", - attributes=_make_attributes(db_name, session_found=True), + attributes=_make_attributes( + db_name, + session_found=True, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 0}.1", + ), span=span_list[0], ) assert_span_attributes( ot_exporter, "CloudSpanner.Batch.commit", - attributes=_make_attributes(db_name, num_mutations=2), + attributes=_make_attributes( + db_name, + num_mutations=2, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 1}.1", + ), span=span_list[1], ) - assert_span_attributes( - ot_exporter, - "CloudSpanner.GetSession", - attributes=_make_attributes(db_name, session_found=True), - span=span_list[2], - ) - assert_span_attributes( - ot_exporter, - "CloudSpanner.Snapshot.read", - attributes=_make_attributes(db_name, columns=sd.COLUMNS, table_id=sd.TABLE), - span=span_list[3], - ) - assert len(span_list) == 4 + if len(span_list) == 4: + if multiplexed_enabled: + expected_snapshot_span_name = "CloudSpanner.CreateMultiplexedSession" + snapshot_session_attributes = _make_attributes( + db_name, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 2}.1", + ) + else: + expected_snapshot_span_name = "CloudSpanner.GetSession" + snapshot_session_attributes = _make_attributes( + db_name, + session_found=True, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 2}.1", + ) + + assert_span_attributes( + ot_exporter, + expected_snapshot_span_name, + attributes=snapshot_session_attributes, + span=span_list[2], + ) + + assert_span_attributes( + ot_exporter, + "CloudSpanner.Snapshot.read", + attributes=_make_attributes( + db_name, + columns=sd.COLUMNS, + table_id=sd.TABLE, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 3}.1", + ), + span=span_list[3], + ) + elif len(span_list) == 3: + assert_span_attributes( + ot_exporter, + "CloudSpanner.Snapshot.read", + attributes=_make_attributes( + db_name, + columns=sd.COLUMNS, + table_id=sd.TABLE, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 2}.1", + ), + span=span_list[2], + ) + else: + raise AssertionError(f"Unexpected number of spans: {len(span_list)}") def test_batch_insert_then_read_string_array_of_string(sessions_database, not_postgres): @@ -578,7 +637,7 @@ def test_batch_insert_w_commit_timestamp(sessions_database, not_postgres): assert not deleted -@_helpers.retry_mabye_aborted_txn +@_helpers.retry_maybe_aborted_txn def test_transaction_read_and_insert_then_rollback( sessions_database, ot_exporter, 
@@ -587,68 +646,136 @@ def test_transaction_read_and_insert_then_rollback( sd = _sample_data db_name = sessions_database.name - session = sessions_database.session() - session.create() - sessions_to_delete.append(session) - with sessions_database.batch() as batch: batch.delete(sd.TABLE, sd.ALL) - transaction = session.transaction() - transaction.begin() + def transaction_work(transaction): + rows = list(transaction.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + assert rows == [] - rows = list(transaction.read(sd.TABLE, sd.COLUMNS, sd.ALL)) - assert rows == [] + transaction.insert(sd.TABLE, sd.COLUMNS, sd.ROW_DATA) - transaction.insert(sd.TABLE, sd.COLUMNS, sd.ROW_DATA) + rows = list(transaction.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + assert rows == [] - # Inserted rows can't be read until after commit. - rows = list(transaction.read(sd.TABLE, sd.COLUMNS, sd.ALL)) - assert rows == [] - transaction.rollback() + raise Exception("Intentional rollback") - rows = list(session.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + try: + sessions_database.run_in_transaction(transaction_work) + except Exception as e: + if "Intentional rollback" not in str(e): + raise + + with sessions_database.snapshot() as snapshot: + rows = list(snapshot.read(sd.TABLE, sd.COLUMNS, sd.ALL)) assert rows == [] if ot_exporter is not None: + import os + + multiplexed_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS", "").lower() == "true" + ) + span_list = ot_exporter.get_finished_spans() got_span_names = [span.name for span in span_list] - want_span_names = [ - "CloudSpanner.CreateSession", - "CloudSpanner.GetSession", - "CloudSpanner.Batch.commit", - "CloudSpanner.Transaction.begin", - "CloudSpanner.Transaction.read", - "CloudSpanner.Transaction.read", - "CloudSpanner.Transaction.rollback", - "CloudSpanner.Snapshot.read", - ] - assert got_span_names == want_span_names + if multiplexed_enabled: + # With multiplexed sessions enabled: + # - Batch operations still use regular sessions (GetSession) + # - run_in_transaction uses regular sessions (GetSession) + # - Snapshot (read-only) can use multiplexed sessions (CreateMultiplexedSession) + # Note: Session creation span may not appear if session is reused from pool + expected_span_names = [ + "CloudSpanner.GetSession", # Batch operation + "CloudSpanner.Batch.commit", # Batch commit + "CloudSpanner.GetSession", # Transaction session + "CloudSpanner.Transaction.read", # First read + "CloudSpanner.Transaction.read", # Second read + "CloudSpanner.Transaction.rollback", # Rollback due to exception + "CloudSpanner.Session.run_in_transaction", # Session transaction wrapper + "CloudSpanner.Database.run_in_transaction", # Database transaction wrapper + "CloudSpanner.Snapshot.read", # Snapshot read + ] + # Check if we have a multiplexed session creation span + if "CloudSpanner.CreateMultiplexedSession" in got_span_names: + expected_span_names.insert(-1, "CloudSpanner.CreateMultiplexedSession") + else: + # Without multiplexed sessions, all operations use regular sessions + expected_span_names = [ + "CloudSpanner.GetSession", # Batch operation + "CloudSpanner.Batch.commit", # Batch commit + "CloudSpanner.GetSession", # Transaction session + "CloudSpanner.Transaction.read", # First read + "CloudSpanner.Transaction.read", # Second read + "CloudSpanner.Transaction.rollback", # Rollback due to exception + "CloudSpanner.Session.run_in_transaction", # Session transaction wrapper + "CloudSpanner.Database.run_in_transaction", # Database transaction wrapper + "CloudSpanner.Snapshot.read", # Snapshot 
read + ] + # Check if we have a session creation span for snapshot + if len(got_span_names) > len(expected_span_names): + expected_span_names.insert(-1, "CloudSpanner.GetSession") + + assert got_span_names == expected_span_names + + sampling_req_id = parse_request_id( + span_list[0].attributes["x_goog_spanner_request_id"] + ) + nth_req0 = sampling_req_id[-2] + + db = sessions_database + + # Span 0: batch operation (always uses GetSession from pool) assert_span_attributes( ot_exporter, - "CloudSpanner.CreateSession", - attributes=_make_attributes(db_name), + "CloudSpanner.GetSession", + attributes=_make_attributes( + db_name, + session_found=True, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 0}.1", + ), span=span_list[0], ) + + # Span 1: batch commit assert_span_attributes( ot_exporter, - "CloudSpanner.GetSession", - attributes=_make_attributes(db_name, session_found=True), + "CloudSpanner.Batch.commit", + attributes=_make_attributes( + db_name, + num_mutations=1, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 1}.1", + ), span=span_list[1], ) + + # Span 2: GetSession for transaction assert_span_attributes( ot_exporter, - "CloudSpanner.Batch.commit", - attributes=_make_attributes(db_name, num_mutations=1), + "CloudSpanner.GetSession", + attributes=_make_attributes( + db_name, + session_found=True, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 2}.1", + ), span=span_list[2], ) + + # Span 3: First transaction read assert_span_attributes( ot_exporter, - "CloudSpanner.Transaction.begin", - attributes=_make_attributes(db_name), + "CloudSpanner.Transaction.read", + attributes=_make_attributes( + db_name, + table_id=sd.TABLE, + columns=sd.COLUMNS, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 3}.1", + ), span=span_list[3], ) + + # Span 4: Second transaction read assert_span_attributes( ot_exporter, "CloudSpanner.Transaction.read", @@ -656,25 +783,92 @@ def test_transaction_read_and_insert_then_rollback( db_name, table_id=sd.TABLE, columns=sd.COLUMNS, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 4}.1", ), span=span_list[4], ) + + # Span 5: Transaction rollback assert_span_attributes( ot_exporter, - "CloudSpanner.Transaction.read", + "CloudSpanner.Transaction.rollback", attributes=_make_attributes( db_name, - table_id=sd.TABLE, - columns=sd.COLUMNS, + x_goog_spanner_request_id=f"1.{REQ_RAND_PROCESS_ID}.{db._nth_client_id}.{db._channel_id}.{nth_req0 + 5}.1", ), span=span_list[5], ) + + # Span 6: Session.run_in_transaction (ERROR status due to intentional exception) assert_span_attributes( ot_exporter, - "CloudSpanner.Transaction.rollback", + "CloudSpanner.Session.run_in_transaction", + status=ot_helpers.StatusCode.ERROR, attributes=_make_attributes(db_name), span=span_list[6], ) + + # Span 7: Database.run_in_transaction (ERROR status due to intentional exception) + assert_span_attributes( + ot_exporter, + "CloudSpanner.Database.run_in_transaction", + status=ot_helpers.StatusCode.ERROR, + attributes=_make_attributes(db_name), + span=span_list[7], + ) + + # Check if we have a snapshot session creation span + snapshot_read_span_index = -1 + snapshot_session_span_index = -1 + + for i, span in enumerate(span_list): + if span.name == "CloudSpanner.Snapshot.read": + snapshot_read_span_index = i + break + + # Look for session 
creation span before the snapshot read + if snapshot_read_span_index > 8: + snapshot_session_span_index = snapshot_read_span_index - 1 + + if ( + multiplexed_enabled + and span_list[snapshot_session_span_index].name + == "CloudSpanner.CreateMultiplexedSession" + ): + expected_snapshot_span_name = "CloudSpanner.CreateMultiplexedSession" + snapshot_session_attributes = _make_attributes( + db_name, + x_goog_spanner_request_id=span_list[ + snapshot_session_span_index + ].attributes["x_goog_spanner_request_id"], + ) + assert_span_attributes( + ot_exporter, + expected_snapshot_span_name, + attributes=snapshot_session_attributes, + span=span_list[snapshot_session_span_index], + ) + elif ( + not multiplexed_enabled + and span_list[snapshot_session_span_index].name + == "CloudSpanner.GetSession" + ): + expected_snapshot_span_name = "CloudSpanner.GetSession" + snapshot_session_attributes = _make_attributes( + db_name, + session_found=True, + x_goog_spanner_request_id=span_list[ + snapshot_session_span_index + ].attributes["x_goog_spanner_request_id"], + ) + assert_span_attributes( + ot_exporter, + expected_snapshot_span_name, + attributes=snapshot_session_attributes, + span=span_list[snapshot_session_span_index], + ) + + # Snapshot read span assert_span_attributes( ot_exporter, "CloudSpanner.Snapshot.read", @@ -682,12 +876,15 @@ def test_transaction_read_and_insert_then_rollback( db_name, table_id=sd.TABLE, columns=sd.COLUMNS, + x_goog_spanner_request_id=span_list[ + snapshot_read_span_index + ].attributes["x_goog_spanner_request_id"], ), - span=span_list[7], + span=span_list[snapshot_read_span_index], ) -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict def test_transaction_read_and_insert_then_exception(sessions_database): class CustomException(Exception): pass @@ -714,7 +911,7 @@ def _transaction_read_then_raise(transaction): assert rows == [] -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict def test_transaction_read_and_insert_or_update_then_commit( sessions_database, sessions_to_delete, @@ -771,8 +968,8 @@ def _generate_insert_returning_statement(row, database_dialect): return f"INSERT INTO {table} ({column_list}) VALUES ({row_data}) {returning}" -@_helpers.retry_mabye_conflict -@_helpers.retry_mabye_aborted_txn +@_helpers.retry_maybe_conflict +@_helpers.retry_maybe_aborted_txn def test_transaction_execute_sql_w_dml_read_rollback( sessions_database, sessions_to_delete, @@ -809,7 +1006,7 @@ def test_transaction_execute_sql_w_dml_read_rollback( # [END spanner_test_dml_rollback_txn_not_committed] -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict def test_transaction_execute_update_read_commit(sessions_database, sessions_to_delete): # [START spanner_test_dml_read_your_writes] sd = _sample_data @@ -838,7 +1035,7 @@ def test_transaction_execute_update_read_commit(sessions_database, sessions_to_d # [END spanner_test_dml_read_your_writes] -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict def test_transaction_execute_update_then_insert_commit( sessions_database, sessions_to_delete ): @@ -870,7 +1067,7 @@ def test_transaction_execute_update_then_insert_commit( # [END spanner_test_dml_with_mutation] -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict @pytest.mark.skipif( _helpers.USE_EMULATOR, reason="Emulator does not support DML Returning." 
) @@ -901,7 +1098,7 @@ def test_transaction_execute_sql_dml_returning( sd._check_rows_data(rows) -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict @pytest.mark.skipif( _helpers.USE_EMULATOR, reason="Emulator does not support DML Returning." ) @@ -929,7 +1126,7 @@ def test_transaction_execute_update_dml_returning( sd._check_rows_data(rows) -@_helpers.retry_mabye_conflict +@_helpers.retry_maybe_conflict @pytest.mark.skipif( _helpers.USE_EMULATOR, reason="Emulator does not support DML Returning." ) @@ -3114,3 +3311,116 @@ def test_interval_array_cast(transaction): sessions_database.run_in_transaction(test_interval_timestamp_comparison) sessions_database.run_in_transaction(test_interval_array_param) sessions_database.run_in_transaction(test_interval_array_cast) + + +def test_session_id_and_multiplexed_flag_behavior(sessions_database, ot_exporter): + import os + + sd = _sample_data + + with sessions_database.batch() as batch: + batch.delete(sd.TABLE, sd.ALL) + batch.insert(sd.TABLE, sd.COLUMNS, sd.ROW_DATA) + + multiplexed_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS", "").lower() == "true" + ) + + snapshot1_session_id = None + snapshot2_session_id = None + snapshot1_is_multiplexed = None + snapshot2_is_multiplexed = None + + snapshot1 = sessions_database.snapshot() + snapshot2 = sessions_database.snapshot() + + try: + with snapshot1 as snap1, snapshot2 as snap2: + rows1 = list(snap1.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + rows2 = list(snap2.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + + snapshot1_session_id = snap1._session.name + snapshot1_is_multiplexed = snap1._session.is_multiplexed + + snapshot2_session_id = snap2._session.name + snapshot2_is_multiplexed = snap2._session.is_multiplexed + except Exception: + with sessions_database.snapshot() as snap1: + rows1 = list(snap1.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + snapshot1_session_id = snap1._session.name + snapshot1_is_multiplexed = snap1._session.is_multiplexed + + with sessions_database.snapshot() as snap2: + rows2 = list(snap2.read(sd.TABLE, sd.COLUMNS, sd.ALL)) + snapshot2_session_id = snap2._session.name + snapshot2_is_multiplexed = snap2._session.is_multiplexed + + sd._check_rows_data(rows1) + sd._check_rows_data(rows2) + assert rows1 == rows2 + + assert snapshot1_session_id is not None + assert snapshot2_session_id is not None + assert snapshot1_is_multiplexed is not None + assert snapshot2_is_multiplexed is not None + + if multiplexed_enabled: + assert snapshot1_session_id == snapshot2_session_id + assert snapshot1_is_multiplexed is True + assert snapshot2_is_multiplexed is True + else: + assert snapshot1_is_multiplexed is False + assert snapshot2_is_multiplexed is False + + if ot_exporter is not None: + span_list = ot_exporter.get_finished_spans() + + session_spans = [] + read_spans = [] + + for span in span_list: + if ( + "CreateSession" in span.name + or "CreateMultiplexedSession" in span.name + or "GetSession" in span.name + ): + session_spans.append(span) + elif "Snapshot.read" in span.name: + read_spans.append(span) + + assert len(read_spans) == 2 + + if multiplexed_enabled: + multiplexed_session_spans = [ + s for s in session_spans if "CreateMultiplexedSession" in s.name + ] + + read_only_multiplexed_sessions = [ + s + for s in multiplexed_session_spans + if s.start_time > span_list[1].end_time + ] + # Allow for session reuse - if no new multiplexed sessions were created, + # it means an existing one was reused (which is valid behavior) + if len(read_only_multiplexed_sessions) == 0: + # Verify that 
multiplexed sessions are actually being used by checking + # that the snapshots themselves are multiplexed + assert snapshot1_is_multiplexed is True + assert snapshot2_is_multiplexed is True + assert snapshot1_session_id == snapshot2_session_id + else: + # New multiplexed session was created + assert len(read_only_multiplexed_sessions) >= 1 + + # Note: We don't need to assert specific counts for regular/get sessions + # as the key validation is that multiplexed sessions are being used properly + else: + read_only_session_spans = [ + s for s in session_spans if s.start_time > span_list[1].end_time + ] + assert len(read_only_session_spans) >= 1 + + multiplexed_session_spans = [ + s for s in session_spans if "CreateMultiplexedSession" in s.name + ] + assert len(multiplexed_session_spans) == 0 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/tests/unit/gapic/spanner_admin_database_v1/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/__init__.py +++ b/tests/unit/gapic/spanner_admin_database_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index 8c49a448c7..beda28dad6 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/tests/unit/gapic/spanner_admin_instance_v1/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/__init__.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
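Editor's note on the pattern above: test_session_id_and_multiplexed_flag_behavior and the other system tests in this change all gate their expectations on the same environment variable. A minimal sketch of that gate, shown in one place; the two helper names are illustrative (they are not part of the library), while the variable name and span names are taken verbatim from the tests above:

    import os

    def multiplexed_sessions_enabled() -> bool:
        # The check used by the tests above: the flag must be the string "true".
        return os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS", "").lower() == "true"

    def expected_session_span_name() -> str:
        # Multiplexed sessions surface as a different session-creation span.
        return (
            "CloudSpanner.CreateMultiplexedSession"
            if multiplexed_sessions_enabled()
            else "CloudSpanner.CreateSession"
        )

The same two-way branch then drives which span names and event sequences each test asserts.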
diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index c3188125ac..9d7b0bb190 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/spanner_v1/__init__.py b/tests/unit/gapic/spanner_v1/__init__.py index 8f6cf06824..cbf94b283c 100644 --- a/tests/unit/gapic/spanner_v1/__init__.py +++ b/tests/unit/gapic/spanner_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index a1227d4861..83d9d72f7f 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -10447,6 +10447,7 @@ def test_execute_streaming_sql_rest_call_success(request_type): return_value = result_set.PartialResultSet( chunked_value=True, resume_token=b"resume_token_blob", + last=True, ) # Wrap the value into a proper Response obj @@ -10469,6 +10470,7 @@ def test_execute_streaming_sql_rest_call_success(request_type): assert isinstance(response, result_set.PartialResultSet) assert response.chunked_value is True assert response.resume_token == b"resume_token_blob" + assert response.last is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -10828,6 +10830,7 @@ def test_streaming_read_rest_call_success(request_type): return_value = result_set.PartialResultSet( chunked_value=True, resume_token=b"resume_token_blob", + last=True, ) # Wrap the value into a proper Response obj @@ -10850,6 +10853,7 @@ def test_streaming_read_rest_call_success(request_type): assert isinstance(response, result_set.PartialResultSet) assert response.chunked_value is True assert response.resume_token == b"resume_token_blob" + assert response.last is True @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/tests/unit/spanner_dbapi/test_connect.py b/tests/unit/spanner_dbapi/test_connect.py index 30ab3c7a8d..34d3d942ad 100644 --- a/tests/unit/spanner_dbapi/test_connect.py +++ b/tests/unit/spanner_dbapi/test_connect.py @@ -17,8 +17,8 @@ import unittest from unittest import mock -import google.auth.credentials - +import google +from google.auth.credentials import AnonymousCredentials INSTANCE = "test-instance" DATABASE = "test-database" @@ -45,7 +45,13 @@ def test_w_implicit(self, mock_client): instance = client.instance.return_value database = instance.database.return_value - connection = connect(INSTANCE, DATABASE) + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) self.assertIsInstance(connection, Connection) @@ -55,11 +61,14 @@ def test_w_implicit(self, mock_client): project=mock.ANY, 
credentials=mock.ANY,
 client_info=mock.ANY,
+ client_options=mock.ANY,
 route_to_leader_enabled=True,
 )
 self.assertIs(connection.database, database)
- instance.database.assert_called_once_with(DATABASE, pool=None)
+ instance.database.assert_called_once_with(
+ DATABASE, pool=None, database_role=None
+ )
 # Database constructs its own pool
 self.assertIsNotNone(connection.database._pool)
 self.assertTrue(connection.instance._client.route_to_leader_enabled)
@@ -75,6 +84,7 @@ def test_w_explicit(self, mock_client):
 client = mock_client.return_value
 instance = client.instance.return_value
 database = instance.database.return_value
+ role = "some_role"
 connection = connect(
 INSTANCE,
@@ -82,6 +92,7 @@
 DATABASE,
 PROJECT,
 credentials,
 pool=pool,
+ database_role=role,
 user_agent=USER_AGENT,
 route_to_leader_enabled=False,
 )
@@ -92,6 +103,7 @@
 project=PROJECT,
 credentials=credentials,
 client_info=mock.ANY,
+ client_options=mock.ANY,
 route_to_leader_enabled=False,
 )
 client_info = mock_client.call_args_list[0][1]["client_info"]
@@ -102,7 +114,9 @@
 client.instance.assert_called_once_with(INSTANCE)
 self.assertIs(connection.database, database)
- instance.database.assert_called_once_with(DATABASE, pool=pool)
+ instance.database.assert_called_once_with(
+ DATABASE, pool=pool, database_role=role
+ )
 def test_w_credential_file_path(self, mock_client):
 from google.cloud.spanner_dbapi import connect
@@ -131,3 +145,17 @@
 client_info = factory.call_args_list[0][1]["client_info"]
 self.assertEqual(client_info.user_agent, USER_AGENT)
 self.assertEqual(client_info.python_version, PY_VERSION)
+
+ def test_with_kwargs(self, mock_client):
+ from google.cloud.spanner_dbapi import connect
+ from google.cloud.spanner_dbapi import Connection
+
+ client = mock_client.return_value
+ instance = client.instance.return_value
+ database = instance.database.return_value
+ self.assertIsNotNone(database)
+
+ connection = connect(INSTANCE, DATABASE, ignore_transaction_warnings=True)
+
+ self.assertIsInstance(connection, Connection)
+ self.assertTrue(connection._ignore_transaction_warnings)
diff --git a/tests/unit/spanner_dbapi/test_connection.py b/tests/unit/spanner_dbapi/test_connection.py
index 4bee9e93c7..04434195db 100644
--- a/tests/unit/spanner_dbapi/test_connection.py
+++ b/tests/unit/spanner_dbapi/test_connection.py
@@ -19,6 +19,7 @@
 import unittest
 import warnings
 import pytest
+from google.auth.credentials import AnonymousCredentials
 from google.cloud.spanner_admin_database_v1 import DatabaseDialect
 from google.cloud.spanner_dbapi.batch_dml_executor import BatchMode
@@ -68,7 +69,11 @@ def _make_connection(
 from google.cloud.spanner_v1.client import Client
 # We don't need a real Client object to test the constructor
- client = Client()
+ client = Client(
+ project="test",
+ credentials=AnonymousCredentials(),
+ client_options={"api_endpoint": "none"},
+ )
 instance = Instance(INSTANCE, client=client)
 database = instance.database(DATABASE, database_dialect=database_dialect)
 return Connection(instance, database, **kwargs)
@@ -239,7 +244,13 @@ def test_close(self):
 from google.cloud.spanner_dbapi import connect
 from google.cloud.spanner_dbapi import InterfaceError
- connection = connect("test-instance", "test-database")
+ connection = connect(
+ "test-instance",
+ "test-database",
+ project="test-project",
+ credentials=AnonymousCredentials(),
+
client_options={"api_endpoint": "none"}, + ) self.assertFalse(connection.is_closed) @@ -815,6 +826,20 @@ def test_custom_client_connection(self): connection = connect("test-instance", "test-database", client=client) self.assertTrue(connection.instance._client == client) + def test_custom_database_role(self): + from google.cloud.spanner_dbapi import connect + + role = "some_role" + connection = connect( + "test-instance", + "test-database", + project="test-project", + database_role=role, + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) + self.assertEqual(connection.database.database_role, role) + def test_invalid_custom_client_connection(self): from google.cloud.spanner_dbapi import connect @@ -830,7 +855,12 @@ def test_invalid_custom_client_connection(self): def test_connection_wo_database(self): from google.cloud.spanner_dbapi import connect - connection = connect("test-instance") + connection = connect( + "test-instance", + credentials=AnonymousCredentials(), + project="test-project", + client_options={"api_endpoint": "none"}, + ) self.assertTrue(connection.database is None) @@ -858,8 +888,9 @@ def database( database_id="database_id", pool=None, database_dialect=DatabaseDialect.GOOGLE_STANDARD_SQL, + database_role=None, ): - return _Database(database_id, pool, database_dialect) + return _Database(database_id, pool, database_dialect, database_role) class _Database(object): @@ -868,7 +899,9 @@ def __init__( database_id="database_id", pool=None, database_dialect=DatabaseDialect.GOOGLE_STANDARD_SQL, + database_role=None, ): self.name = database_id self.pool = pool self.database_dialect = database_dialect + self.database_role = database_role diff --git a/tests/unit/spanner_dbapi/test_cursor.py b/tests/unit/spanner_dbapi/test_cursor.py index 2a8cddac9b..b96e8c1444 100644 --- a/tests/unit/spanner_dbapi/test_cursor.py +++ b/tests/unit/spanner_dbapi/test_cursor.py @@ -16,6 +16,8 @@ from unittest import mock import sys import unittest + +from google.auth.credentials import AnonymousCredentials from google.rpc.code_pb2 import ABORTED from google.cloud.spanner_dbapi.parsed_statement import ( @@ -127,7 +129,13 @@ def test_do_batch_update(self): sql = "DELETE FROM table WHERE col1 = %s" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) connection.autocommit = True transaction = self._transaction_mock(mock_response=[1, 1, 1]) @@ -479,7 +487,13 @@ def test_executemany_DLL(self, mock_client): def test_executemany_client_statement(self): from google.cloud.spanner_dbapi import connect, ProgrammingError - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) cursor = connection.cursor() @@ -497,7 +511,13 @@ def test_executemany(self, mock_client): operation = """SELECT * FROM table1 WHERE "col1" = @a1""" params_seq = ((1,), (2,)) - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) cursor = connection.cursor() cursor._result_set = [1, 2, 3] @@ -519,7 +539,13 @@ def test_executemany_delete_batch_autocommit(self): sql = "DELETE FROM table WHERE col1 = 
%s" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) connection.autocommit = True transaction = self._transaction_mock() @@ -551,7 +577,13 @@ def test_executemany_update_batch_autocommit(self): sql = "UPDATE table SET col1 = %s WHERE col2 = %s" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) connection.autocommit = True transaction = self._transaction_mock() @@ -595,7 +627,13 @@ def test_executemany_insert_batch_non_autocommit(self): sql = """INSERT INTO table (col1, "col2", `col3`, `"col4"`) VALUES (%s, %s, %s, %s)""" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) transaction = self._transaction_mock() @@ -632,7 +670,13 @@ def test_executemany_insert_batch_autocommit(self): sql = """INSERT INTO table (col1, "col2", `col3`, `"col4"`) VALUES (%s, %s, %s, %s)""" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) connection.autocommit = True @@ -676,7 +720,13 @@ def test_executemany_insert_batch_failed(self): sql = """INSERT INTO table (col1, "col2", `col3`, `"col4"`) VALUES (%s, %s, %s, %s)""" err_details = "Details here" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) connection.autocommit = True cursor = connection.cursor() @@ -705,7 +755,13 @@ def test_executemany_insert_batch_aborted(self): args = [(1, 2, 3, 4), (5, 6, 7, 8)] err_details = "Aborted details here" - connection = connect("test-instance", "test-database") + connection = connect( + "test-instance", + "test-database", + project="test-project", + credentials=AnonymousCredentials(), + client_options={"api_endpoint": "none"}, + ) transaction1 = mock.Mock() transaction1.batch_update = mock.Mock( diff --git a/tests/unit/spanner_dbapi/test_transaction_helper.py b/tests/unit/spanner_dbapi/test_transaction_helper.py index 1d50a51825..958fca0ce6 100644 --- a/tests/unit/spanner_dbapi/test_transaction_helper.py +++ b/tests/unit/spanner_dbapi/test_transaction_helper.py @@ -323,7 +323,7 @@ def test_retry_transaction_aborted_retry(self): None, ] - self._under_test.retry_transaction() + self._under_test.retry_transaction(default_retry_delay=0) run_mock.assert_has_calls( ( diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index 7010affdd2..d29f030e55 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -15,6 +15,7 @@ import unittest import mock + from google.cloud.spanner_v1 import TransactionOptions @@ -824,7 +825,7 @@ def test_retry_on_error(self): True, ] - _retry(functools.partial(test_api.test_fxn)) + _retry(functools.partial(test_api.test_fxn), delay=0) self.assertEqual(test_api.test_fxn.call_count, 3) @@ -844,6 +845,7 @@ def test_retry_allowed_exceptions(self): _retry( 
functools.partial(test_api.test_fxn), allowed_exceptions={NotFound: None}, + delay=0, ) self.assertEqual(test_api.test_fxn.call_count, 2) @@ -860,7 +862,7 @@ def test_retry_count(self): ] with self.assertRaises(InternalServerError): - _retry(functools.partial(test_api.test_fxn), retry_count=1) + _retry(functools.partial(test_api.test_fxn), retry_count=1, delay=0) self.assertEqual(test_api.test_fxn.call_count, 2) @@ -879,6 +881,7 @@ def test_check_rst_stream_error(self): _retry( functools.partial(test_api.test_fxn), allowed_exceptions={InternalServerError: _check_rst_stream_error}, + delay=0, ) self.assertEqual(test_api.test_fxn.call_count, 3) @@ -896,7 +899,7 @@ def test_retry_on_aborted_exception_with_success_after_first_aborted_retry(self) ] deadline = time.time() + 30 result_after_retry = _retry_on_aborted_exception( - functools.partial(test_api.test_fxn), deadline + functools.partial(test_api.test_fxn), deadline, default_retry_delay=0 ) self.assertEqual(test_api.test_fxn.call_count, 2) @@ -910,16 +913,18 @@ def test_retry_on_aborted_exception_with_success_after_three_retries(self): test_api = mock.create_autospec(self.test_class) # Case where aborted exception is thrown after other generic exceptions + aborted = Aborted("aborted exception", errors=["Aborted error"]) test_api.test_fxn.side_effect = [ - Aborted("aborted exception", errors=("Aborted error")), - Aborted("aborted exception", errors=("Aborted error")), - Aborted("aborted exception", errors=("Aborted error")), + aborted, + aborted, + aborted, "true", ] deadline = time.time() + 30 _retry_on_aborted_exception( functools.partial(test_api.test_fxn), deadline=deadline, + default_retry_delay=0, ) self.assertEqual(test_api.test_fxn.call_count, 4) @@ -935,10 +940,12 @@ def test_retry_on_aborted_exception_raises_aborted_if_deadline_expires(self): Aborted("aborted exception", errors=("Aborted error")), "true", ] - deadline = time.time() + 0.1 + deadline = time.time() + 0.001 with self.assertRaises(Aborted): _retry_on_aborted_exception( - functools.partial(test_api.test_fxn), deadline=deadline + functools.partial(test_api.test_fxn), + deadline=deadline, + default_retry_delay=0.01, ) self.assertEqual(test_api.test_fxn.call_count, 1) diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py index 2cea740ab6..cb3dc7e2cd 100644 --- a/tests/unit/test_batch.py +++ b/tests/unit/test_batch.py @@ -37,6 +37,11 @@ from google.cloud.spanner_v1.keyset import KeySet from google.rpc.status_pb2 import Status +from google.cloud.spanner_v1._helpers import ( + AtomicCounter, + _metadata_with_request_id, +) +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID TABLE_NAME = "citizens" COLUMNS = ["email", "first_name", "last_name", "age"] @@ -211,10 +216,13 @@ def test_commit_grpc_error(self): with self.assertRaises(Unknown): batch.commit() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.Batch.commit", status=StatusCode.ERROR, - attributes=dict(BASE_ATTRIBUTES, num_mutations=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutations=1, x_goog_spanner_request_id=req_id + ), ) def test_commit_ok(self): @@ -244,11 +252,16 @@ def test_commit_ok(self): self.assertEqual(mutations, batch._mutations) self.assertIsInstance(single_use_txn, TransactionOptions) self.assertTrue(type(single_use_txn).pb(single_use_txn).HasField("read_write")) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertEqual( metadata, [ 
("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertEqual(request_options, RequestOptions()) @@ -256,7 +269,9 @@ def test_commit_ok(self): self.assertSpanAttributes( "CloudSpanner.Batch.commit", - attributes=dict(BASE_ATTRIBUTES, num_mutations=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutations=1, x_goog_spanner_request_id=req_id + ), ) def test_aborted_exception_on_commit_with_retries(self): @@ -277,17 +292,13 @@ def test_aborted_exception_on_commit_with_retries(self): # Assertion: Ensure that calling batch.commit() raises the Aborted exception with self.assertRaises(Aborted) as context: - batch.commit() + batch.commit(timeout_secs=0.1, default_retry_delay=0) # Verify additional details about the exception self.assertEqual(str(context.exception), "409 Transaction was aborted") self.assertGreater( api.commit.call_count, 1, "commit should be called more than once" ) - # Since we are using exponential backoff here and default timeout is set to 30 sec 2^x <= 30. So value for x will be 4 - self.assertEqual( - api.commit.call_count, 4, "commit should be called exactly 4 times" - ) def _test_commit_with_options( self, @@ -342,18 +353,25 @@ def _test_commit_with_options( single_use_txn.isolation_level, isolation_level, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertEqual( metadata, [ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertEqual(actual_request_options, expected_request_options) self.assertSpanAttributes( "CloudSpanner.Batch.commit", - attributes=dict(BASE_ATTRIBUTES, num_mutations=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutations=1, x_goog_spanner_request_id=req_id + ), ) self.assertEqual(max_commit_delay_in, max_commit_delay) @@ -452,18 +470,25 @@ def test_context_mgr_success(self): self.assertEqual(mutations, batch._mutations) self.assertIsInstance(single_use_txn, TransactionOptions) self.assertTrue(type(single_use_txn).pb(single_use_txn).HasField("read_write")) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertEqual( metadata, [ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertEqual(request_options, RequestOptions()) self.assertSpanAttributes( "CloudSpanner.Batch.commit", - attributes=dict(BASE_ATTRIBUTES, num_mutations=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutations=1, x_goog_spanner_request_id=req_id + ), ) def test_context_mgr_failure(self): @@ -507,10 +532,13 @@ def test_batch_write_already_committed(self): group = groups.group() group.delete(TABLE_NAME, keyset=keyset) groups.batch_write() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.batch_write", status=StatusCode.OK, - attributes=dict(BASE_ATTRIBUTES, num_mutation_groups=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutation_groups=1, x_goog_spanner_request_id=req_id + ), ) assert groups.committed # The second call to batch_write should raise an error. 
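Editor's note on the req_id values asserted throughout this file: every x-goog-spanner-request-id follows the same six-segment layout, which parse_request_id (imported in the system tests earlier in this change) splits back into a tuple. A minimal sketch of how such a value is composed; build_request_id is an illustrative name of ours, while REQ_RAND_PROCESS_ID and the segment order come straight from the f-strings and want_*_segments tuples in this change:

    from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID

    def build_request_id(nth_client_id, channel_id, nth_request, attempt=1):
        # Segments: format version (currently 1), per-process random id,
        # nth client in this process, channel id, nth request on that client,
        # and the attempt number, which increments when the same request is
        # retried (e.g. the retried BatchCreateSessions earlier in this change
        # is asserted as "...1.1" and then "...1.2").
        return f"1.{REQ_RAND_PROCESS_ID}.{nth_client_id}.{channel_id}.{nth_request}.{attempt}"

The assertions below then expect this header as the final entry in each call's metadata, after the resource-prefix and route-to-leader entries.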
@@ -530,10 +558,13 @@ def test_batch_write_grpc_error(self): with self.assertRaises(Unknown): groups.batch_write() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.batch_write", status=StatusCode.ERROR, - attributes=dict(BASE_ATTRIBUTES, num_mutation_groups=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutation_groups=1, x_goog_spanner_request_id=req_id + ), ) def _test_batch_write_with_request_options( @@ -583,10 +614,16 @@ def _test_batch_write_with_request_options( "traceparent is missing in metadata", ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" + expected_metadata.append( + ("x-goog-spanner-request-id", req_id), + ) + # Remove traceparent from actual metadata for comparison filtered_metadata = [item for item in metadata if item[0] != "traceparent"] self.assertEqual(filtered_metadata, expected_metadata) + if request_options is None: expected_request_options = RequestOptions() elif type(request_options) is dict: @@ -601,7 +638,9 @@ def _test_batch_write_with_request_options( self.assertSpanAttributes( "CloudSpanner.batch_write", status=StatusCode.OK, - attributes=dict(BASE_ATTRIBUTES, num_mutation_groups=1), + attributes=dict( + BASE_ATTRIBUTES, num_mutation_groups=1, x_goog_spanner_request_id=req_id + ), ) def test_batch_write_no_request_options(self): @@ -639,12 +678,39 @@ def session_id(self): class _Database(object): + name = "testing" + _route_to_leader_enabled = True + NTH_CLIENT_ID = AtomicCounter() + def __init__(self, enable_end_to_end_tracing=False): self.name = "testing" self._route_to_leader_enabled = True if enable_end_to_end_tracing: self.observability_options = dict(enable_end_to_end_tracing=True) self.default_transaction_options = DefaultTransactionOptions() + self._nth_request = 0 + self._nth_client_id = _Database.NTH_CLIENT_ID.increment() + + @property + def _next_nth_request(self): + self._nth_request += 1 + return self._nth_request + + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + + @property + def _channel_id(self): + return 1 class _FauxSpannerAPI: diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index a464209874..6084224a84 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -16,6 +16,8 @@ import os import mock +from google.auth.credentials import AnonymousCredentials + from google.cloud.spanner_v1 import DirectedReadOptions, DefaultTransactionOptions @@ -513,7 +515,7 @@ def test_list_instance_configs(self): from google.cloud.spanner_admin_instance_v1 import ListInstanceConfigsRequest from google.cloud.spanner_admin_instance_v1 import ListInstanceConfigsResponse - api = InstanceAdminClient() + api = InstanceAdminClient(credentials=AnonymousCredentials()) credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client._instance_admin_api = api @@ -560,8 +562,8 @@ def test_list_instance_configs_w_options(self): from google.cloud.spanner_admin_instance_v1 import ListInstanceConfigsRequest from google.cloud.spanner_admin_instance_v1 import ListInstanceConfigsResponse - api = InstanceAdminClient() credentials = _make_credentials() + api = InstanceAdminClient(credentials=credentials) client = self._make_one(project=self.PROJECT, credentials=credentials) 
client._instance_admin_api = api @@ -636,8 +638,8 @@ def test_list_instances(self): from google.cloud.spanner_admin_instance_v1 import ListInstancesRequest from google.cloud.spanner_admin_instance_v1 import ListInstancesResponse - api = InstanceAdminClient() credentials = _make_credentials() + api = InstanceAdminClient(credentials=credentials) client = self._make_one(project=self.PROJECT, credentials=credentials) client._instance_admin_api = api @@ -684,8 +686,8 @@ def test_list_instances_w_options(self): from google.cloud.spanner_admin_instance_v1 import ListInstancesRequest from google.cloud.spanner_admin_instance_v1 import ListInstancesResponse - api = InstanceAdminClient() credentials = _make_credentials() + api = InstanceAdminClient(credentials=credentials) client = self._make_one(project=self.PROJECT, credentials=credentials) client._instance_admin_api = api diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py index 1afda7f850..aee1c83f62 100644 --- a/tests/unit/test_database.py +++ b/tests/unit/test_database.py @@ -30,6 +30,11 @@ DirectedReadOptions, DefaultTransactionOptions, ) +from google.cloud.spanner_v1._helpers import ( + AtomicCounter, + _metadata_with_request_id, +) +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID DML_WO_PARAM = """ DELETE FROM citizens @@ -115,7 +120,9 @@ def _make_database_admin_api(): def _make_spanner_api(): from google.cloud.spanner_v1 import SpannerClient - return mock.create_autospec(SpannerClient, instance=True) + api = mock.create_autospec(SpannerClient, instance=True) + api._transport = "transport" + return api def test_ctor_defaults(self): from google.cloud.spanner_v1.pool import BurstyPool @@ -549,7 +556,13 @@ def test_create_grpc_error(self): api.create_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_create_already_exists(self): @@ -576,7 +589,13 @@ def test_create_already_exists(self): api.create_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_create_instance_not_found(self): @@ -602,7 +621,13 @@ def test_create_instance_not_found(self): api.create_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_create_success(self): @@ -638,7 +663,13 @@ def test_create_success(self): api.create_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_create_success_w_encryption_config_dict(self): @@ -675,7 +706,13 @@ def test_create_success_w_encryption_config_dict(self): api.create_database.assert_called_once_with( 
request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_create_success_w_proto_descriptors(self): @@ -710,7 +747,13 @@ def test_create_success_w_proto_descriptors(self): api.create_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_exists_grpc_error(self): @@ -728,7 +771,13 @@ def test_exists_grpc_error(self): api.get_database_ddl.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_exists_not_found(self): @@ -745,7 +794,13 @@ def test_exists_not_found(self): api.get_database_ddl.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_exists_success(self): @@ -764,7 +819,13 @@ def test_exists_success(self): api.get_database_ddl.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_reload_grpc_error(self): @@ -782,7 +843,13 @@ def test_reload_grpc_error(self): api.get_database_ddl.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_reload_not_found(self): @@ -800,7 +867,13 @@ def test_reload_not_found(self): api.get_database_ddl.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_reload_success(self): @@ -859,11 +932,23 @@ def test_reload_success(self): api.get_database_ddl.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) api.get_database.assert_called_once_with( name=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + 
], ) def test_update_ddl_grpc_error(self): @@ -889,7 +974,13 @@ def test_update_ddl_grpc_error(self): api.update_database_ddl.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_update_ddl_not_found(self): @@ -915,7 +1006,13 @@ def test_update_ddl_not_found(self): api.update_database_ddl.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_update_ddl(self): @@ -942,7 +1039,13 @@ def test_update_ddl(self): api.update_database_ddl.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_update_ddl_w_operation_id(self): @@ -969,7 +1072,13 @@ def test_update_ddl_w_operation_id(self): api.update_database_ddl.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_update_success(self): @@ -995,7 +1104,13 @@ def test_update_success(self): api.update_database.assert_called_once_with( database=expected_database, update_mask=field_mask, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_update_ddl_w_proto_descriptors(self): @@ -1023,7 +1138,13 @@ def test_update_ddl_w_proto_descriptors(self): api.update_database_ddl.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_drop_grpc_error(self): @@ -1041,7 +1162,13 @@ def test_drop_grpc_error(self): api.drop_database.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_drop_not_found(self): @@ -1059,7 +1186,13 @@ def test_drop_not_found(self): api.drop_database.assert_called_once_with( database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_drop_success(self): @@ -1076,7 +1209,13 @@ def test_drop_success(self): api.drop_database.assert_called_once_with( 
database=self.DATABASE_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def _execute_partitioned_dml_helper( @@ -1089,6 +1228,7 @@ def _execute_partitioned_dml_helper( retried=False, exclude_txn_from_change_streams=False, ): + import os from google.api_core.exceptions import Aborted from google.api_core.retry import Retry from google.protobuf.struct_pb2 import Struct @@ -1123,6 +1263,31 @@ def _execute_partitioned_dml_helper( session = _Session() pool.put(session) database = self._make_one(self.DATABASE_ID, instance, pool=pool) + + multiplexed_partitioned_enabled = ( + os.environ.get( + "GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_PARTITIONED_OPS", "false" + ).lower() + == "true" + ) + + if multiplexed_partitioned_enabled: + # When multiplexed sessions are enabled, create a mock multiplexed session + # that the sessions manager will return + multiplexed_session = _Session() + multiplexed_session.name = ( + self.SESSION_NAME + ) # Use the expected session name + multiplexed_session.is_multiplexed = True + # Configure the sessions manager to return the multiplexed session + database._sessions_manager.get_session = mock.Mock( + return_value=multiplexed_session + ) + expected_session = multiplexed_session + else: + # When multiplexed sessions are disabled, use the regular pool session + expected_session = session + api = database._spanner_api = self._make_spanner_api() api._method_configs = {"ExecuteStreamingSql": MethodConfig(retry=Retry())} if retried: @@ -1149,18 +1314,59 @@ def _execute_partitioned_dml_helper( exclude_txn_from_change_streams=exclude_txn_from_change_streams, ) - api.begin_transaction.assert_called_with( - session=session.name, - options=txn_options, - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), - ], - ) if retried: + api.begin_transaction.assert_called_with( + session=expected_session.name, + options=txn_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), + ], + ) self.assertEqual(api.begin_transaction.call_count, 2) + api.begin_transaction.assert_called_with( + session=expected_session.name, + options=txn_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + # Note: this retry was triggered by a transaction abort, not by a service-unavailable error.
+ f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), + ], + ) else: + api.begin_transaction.assert_called_with( + session=expected_session.name, + options=txn_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + ) self.assertEqual(api.begin_transaction.call_count, 1) + api.begin_transaction.assert_called_with( + session=expected_session.name, + options=txn_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + ) if params: expected_params = Struct( @@ -1191,18 +1397,11 @@ def _execute_partitioned_dml_helper( request_options=expected_request_options, ) - api.execute_streaming_sql.assert_any_call( - request=expected_request, - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), - ], - ) if retried: expected_retry_transaction = TransactionSelector( id=self.RETRY_TRANSACTION_ID ) - expected_request = ExecuteSqlRequest( + expected_request_with_retry = ExecuteSqlRequest( session=self.SESSION_NAME, sql=dml, transaction=expected_retry_transaction, @@ -1211,17 +1410,59 @@ def _execute_partitioned_dml_helper( query_options=expected_query_options, request_options=expected_request_options, ) - api.execute_streaming_sql.assert_called_with( + + self.assertEqual( + api.execute_streaming_sql.call_args_list, + [ + mock.call( + request=expected_request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + ], + ), + mock.call( + request=expected_request_with_retry, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.4.1", + ), + ], + ), + ], + ) + self.assertEqual(api.execute_streaming_sql.call_count, 2) + else: + api.execute_streaming_sql.assert_any_call( request=expected_request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) - self.assertEqual(api.execute_streaming_sql.call_count, 2) - else: self.assertEqual(api.execute_streaming_sql.call_count, 1) + # Verify that the correct session type was used based on environment + if multiplexed_partitioned_enabled: + # Verify that sessions_manager.get_session was called with PARTITIONED transaction type + from google.cloud.spanner_v1.session_options import TransactionType + + database._sessions_manager.get_session.assert_called_with( + TransactionType.PARTITIONED + ) + # If multiplexed sessions are not enabled, the regular pool session should be used + def test_execute_partitioned_dml_wo_params(self): self._execute_partitioned_dml_helper(dml=DML_WO_PARAM) @@ -1298,7 +1539,9 @@ def test_session_factory_w_labels(self): self.assertEqual(session.labels, labels) def test_snapshot_defaults(self): + import os from google.cloud.spanner_v1.database import SnapshotCheckout + 
from google.cloud.spanner_v1.snapshot import Snapshot client = _Client() instance = _Instance(self.INSTANCE_NAME, client=client) @@ -1307,15 +1550,47 @@ def test_snapshot_defaults(self): pool.put(session) database = self._make_one(self.DATABASE_ID, instance, pool=pool) + # Check if multiplexed sessions are enabled for read operations + multiplexed_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS") == "true" + ) + + if multiplexed_enabled: + # When multiplexed sessions are enabled, configure the sessions manager + # to return a multiplexed session for read operations + multiplexed_session = _Session() + multiplexed_session.name = self.SESSION_NAME + multiplexed_session.is_multiplexed = True + # Override the side_effect to return the multiplexed session + database._sessions_manager.get_session = mock.Mock( + return_value=multiplexed_session + ) + expected_session = multiplexed_session + else: + expected_session = session + checkout = database.snapshot() self.assertIsInstance(checkout, SnapshotCheckout) self.assertIs(checkout._database, database) self.assertEqual(checkout._kw, {}) + with checkout as snapshot: + if not multiplexed_enabled: + self.assertIsNone(pool._session) + self.assertIsInstance(snapshot, Snapshot) + self.assertIs(snapshot._session, expected_session) + self.assertTrue(snapshot._strong) + self.assertFalse(snapshot._multi_use) + + if not multiplexed_enabled: + self.assertIs(pool._session, session) + def test_snapshot_w_read_timestamp_and_multi_use(self): import datetime + import os from google.cloud._helpers import UTC from google.cloud.spanner_v1.database import SnapshotCheckout + from google.cloud.spanner_v1.snapshot import Snapshot now = datetime.datetime.utcnow().replace(tzinfo=UTC) client = _Client() @@ -1325,12 +1600,42 @@ def test_snapshot_w_read_timestamp_and_multi_use(self): pool.put(session) database = self._make_one(self.DATABASE_ID, instance, pool=pool) + # Check if multiplexed sessions are enabled for read operations + multiplexed_enabled = ( + os.getenv("GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS") == "true" + ) + + if multiplexed_enabled: + # When multiplexed sessions are enabled, configure the sessions manager + # to return a multiplexed session for read operations + multiplexed_session = _Session() + multiplexed_session.name = self.SESSION_NAME + multiplexed_session.is_multiplexed = True + # Override the side_effect to return the multiplexed session + database._sessions_manager.get_session = mock.Mock( + return_value=multiplexed_session + ) + expected_session = multiplexed_session + else: + expected_session = session + checkout = database.snapshot(read_timestamp=now, multi_use=True) self.assertIsInstance(checkout, SnapshotCheckout) self.assertIs(checkout._database, database) self.assertEqual(checkout._kw, {"read_timestamp": now, "multi_use": True}) + with checkout as snapshot: + if not multiplexed_enabled: + self.assertIsNone(pool._session) + self.assertIsInstance(snapshot, Snapshot) + self.assertIs(snapshot._session, expected_session) + self.assertEqual(snapshot._read_timestamp, now) + self.assertTrue(snapshot._multi_use) + + if not multiplexed_enabled: + self.assertIs(pool._session, session) + def test_batch(self): from google.cloud.spanner_v1.database import BatchCheckout @@ -1401,7 +1706,7 @@ def test_run_in_transaction_wo_args(self): import datetime NOW = datetime.datetime.now() - client = _Client() + client = _Client(observability_options=dict(enable_end_to_end_tracing=True)) instance = _Instance(self.INSTANCE_NAME, client=client) pool = 
_Pool() session = _Session() @@ -1490,7 +1795,13 @@ def test_restore_grpc_error(self): api.restore_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_restore_not_found(self): @@ -1516,7 +1827,13 @@ def test_restore_not_found(self): api.restore_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_restore_success(self): @@ -1553,7 +1870,13 @@ def test_restore_success(self): api.restore_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_restore_success_w_encryption_config_dict(self): @@ -1594,7 +1917,13 @@ def test_restore_success_w_encryption_config_dict(self): api.restore_database.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_restore_w_invalid_encryption_config_dict(self): @@ -1741,7 +2070,13 @@ def test_list_database_roles_grpc_error(self): api.list_database_roles.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_list_database_roles_defaults(self): @@ -1762,7 +2097,13 @@ def test_list_database_roles_defaults(self): api.list_database_roles.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) self.assertIsNotNone(resp) @@ -1849,6 +2190,10 @@ def test_context_mgr_success(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -1896,6 +2241,10 @@ def test_context_mgr_w_commit_stats_success(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -1916,7 +2265,7 @@ def test_context_mgr_w_aborted_commit_status(self): pool = database._pool = _Pool() session = _Session(database) pool.put(session) - checkout = self._make_one(database) + checkout = self._make_one(database, timeout_secs=0.1, default_retry_delay=0) with self.assertRaises(Aborted): with 
checkout as batch: @@ -1935,14 +2284,16 @@ def test_context_mgr_w_aborted_commit_status(self): return_commit_stats=True, request_options=RequestOptions(), ) - # Asserts that the exponential backoff retry for aborted transactions with a 30-second deadline - # allows for a maximum of 4 retries (2^x <= 30) to stay within the time limit. - self.assertEqual(api.commit.call_count, 4) + self.assertGreater(api.commit.call_count, 1) api.commit.assert_any_call( request=request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -2216,10 +2567,17 @@ def test__get_session_already(self): def test__get_session_new(self): database = self._make_database() - session = database.session.return_value = self._make_session() + session = self._make_session() + # Configure sessions_manager to return the session for partition operations + database.sessions_manager.get_session.return_value = session batch_txn = self._make_one(database) self.assertIs(batch_txn._get_session(), session) - session.create.assert_called_once_with() + # Verify that sessions_manager.get_session was called with PARTITIONED transaction type + from google.cloud.spanner_v1.session_options import TransactionType + + database.sessions_manager.get_session.assert_called_once_with( + TransactionType.PARTITIONED + ) def test__get_snapshot_already(self): database = self._make_database() @@ -2854,11 +3212,25 @@ def test_close_w_session(self): database = self._make_database() batch_txn = self._make_one(database) session = batch_txn._session = self._make_session() + # Configure session as non-multiplexed (default behavior) + session.is_multiplexed = False batch_txn.close() session.delete.assert_called_once_with() + def test_close_w_multiplexed_session(self): + database = self._make_database() + batch_txn = self._make_one(database) + session = batch_txn._session = self._make_session() + # Configure session as multiplexed + session.is_multiplexed = True + + batch_txn.close() + + # Multiplexed sessions should not be deleted + session.delete.assert_not_called() + def test_process_w_invalid_batch(self): token = b"TOKEN" batch = {"partition": token, "bogus": b"BOGUS"} @@ -3017,6 +3389,10 @@ def test_context_mgr_success(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -3115,12 +3491,15 @@ def _make_database_admin_api(): class _Client(object): + NTH_CLIENT = AtomicCounter() + def __init__( self, project=TestDatabase.PROJECT_ID, route_to_leader_enabled=True, directed_read_options=None, default_transaction_options=DefaultTransactionOptions(), + observability_options=None, ): from google.cloud.spanner_v1 import ExecuteSqlRequest @@ -3135,6 +3514,13 @@ def __init__( self.route_to_leader_enabled = route_to_leader_enabled self.directed_read_options = directed_read_options self.default_transaction_options = default_transaction_options + self.observability_options = observability_options + self._nth_client_id = _Client.NTH_CLIENT.increment() + self._nth_request = AtomicCounter() + + @property + def _next_nth_request(self): + return self._nth_request.increment() class _Instance(object): @@ -3153,6 +3539,7 @@ def __init__(self, name): class _Database(object): log_commit_stats = False 
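The test__get_session_new and test_close_w_multiplexed_session cases above encode the session-lifecycle contract these changes introduce: partition operations obtain sessions through sessions_manager.get_session(TransactionType.PARTITIONED), and multiplexed sessions are never deleted on close. A hedged sketch of that contract; only TransactionType, get_session, is_multiplexed, and delete appear in the patch, and the wrapper names here are illustrative.

from google.cloud.spanner_v1.session_options import TransactionType

def checkout_partition_session(database):
    # Partitioned operations ask the sessions manager for a session by
    # transaction type; with the multiplexed-sessions env flags set, the
    # manager may return a shared multiplexed session instead of a pooled one.
    return database.sessions_manager.get_session(TransactionType.PARTITIONED)

def close_partition_session(session):
    # Multiplexed sessions are shared and long-lived, so only regular
    # (non-multiplexed) sessions are deleted when the batch transaction closes.
    if not session.is_multiplexed:
        session.delete()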
_route_to_leader_enabled = True + NTH_CLIENT_ID = AtomicCounter() def __init__(self, name, instance=None): self.name = name @@ -3163,6 +3550,51 @@ def __init__(self, name, instance=None): self.logger = mock.create_autospec(Logger, instance=True) self._directed_read_options = None self.default_transaction_options = DefaultTransactionOptions() + self._nth_request = AtomicCounter() + self._nth_client_id = _Database.NTH_CLIENT_ID.increment() + + # Mock sessions manager for multiplexed sessions support + self._sessions_manager = mock.Mock() + # Configure get_session to return sessions from the pool + self._sessions_manager.get_session = mock.Mock( + side_effect=lambda tx_type: self._pool.get() + if hasattr(self, "_pool") and self._pool + else None + ) + self._sessions_manager.put_session = mock.Mock( + side_effect=lambda session: self._pool.put(session) + if hasattr(self, "_pool") and self._pool + else None + ) + + @property + def sessions_manager(self): + """Returns the database sessions manager. + + :rtype: Mock + :returns: The mock sessions manager for this database. + """ + return self._sessions_manager + + @property + def _next_nth_request(self): + return self._nth_request.increment() + + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + + @property + def _channel_id(self): + return 1 class _Pool(object): @@ -3191,6 +3623,7 @@ def __init__( self._database = database self.name = name self._run_transaction_function = run_transaction_function + self.is_multiplexed = False # Default to non-multiplexed for tests def run_in_transaction(self, func, *args, **kw): if self._run_transaction_function: diff --git a/tests/unit/test_instance.py b/tests/unit/test_instance.py index e7ad729438..f3bf6726c0 100644 --- a/tests/unit/test_instance.py +++ b/tests/unit/test_instance.py @@ -14,6 +14,8 @@ import unittest import mock +from google.auth.credentials import AnonymousCredentials + from google.cloud.spanner_v1 import DefaultTransactionOptions @@ -586,7 +588,7 @@ def test_list_databases(self): from google.cloud.spanner_admin_database_v1 import ListDatabasesRequest from google.cloud.spanner_admin_database_v1 import ListDatabasesResponse - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -625,7 +627,7 @@ def test_list_databases_w_options(self): from google.cloud.spanner_admin_database_v1 import ListDatabasesRequest from google.cloud.spanner_admin_database_v1 import ListDatabasesResponse - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -704,7 +706,7 @@ def test_list_backups_defaults(self): from google.cloud.spanner_admin_database_v1 import ListBackupsRequest from google.cloud.spanner_admin_database_v1 import ListBackupsResponse - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -743,7 +745,7 @@ def test_list_backups_w_options(self): from google.cloud.spanner_admin_database_v1 import ListBackupsRequest from google.cloud.spanner_admin_database_v1 
import ListBackupsResponse - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -787,7 +789,7 @@ def test_list_backup_operations_defaults(self): from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -832,7 +834,7 @@ def test_list_backup_operations_w_options(self): from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -884,7 +886,7 @@ def test_list_database_operations_defaults(self): from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) @@ -941,7 +943,7 @@ def test_list_database_operations_w_options(self): from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - api = DatabaseAdminClient() + api = DatabaseAdminClient(credentials=AnonymousCredentials()) client = _Client(self.PROJECT) client.database_admin_api = api instance = self._make_one(self.INSTANCE_ID, client) diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index bb2695553b..59fe6d2f61 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -15,6 +15,9 @@ import pytest from unittest.mock import MagicMock from google.api_core.exceptions import ServiceUnavailable +from google.auth import exceptions +from google.auth.credentials import Credentials + from google.cloud.spanner_v1.client import Client from unittest.mock import patch from grpc._interceptor import _UnaryOutcome @@ -28,6 +31,26 @@ # pytest.importorskip("opentelemetry.semconv.attributes.otel_attributes") +class TestCredentials(Credentials): + @property + def expired(self): + return False + + @property + def valid(self): + return True + + def refresh(self, request): + raise exceptions.InvalidOperation("Anonymous credentials cannot be refreshed.") + + def apply(self, headers, token=None): + if token is not None: + raise exceptions.InvalidValue("Anonymous credentials don't support tokens.") + + def before_request(self, request, method, url, headers): + """Anonymous credentials do nothing to the request.""" + + @pytest.fixture(autouse=True) def patched_client(monkeypatch): monkeypatch.setenv("SPANNER_ENABLE_BUILTIN_METRICS", "true") @@ -37,7 +60,11 @@ def patched_client(monkeypatch): if SpannerMetricsTracerFactory._metrics_tracer_factory is not None: SpannerMetricsTracerFactory._metrics_tracer_factory = None - client = Client() + client = Client( + project="test", + credentials=TestCredentials(), + # client_options={"api_endpoint": "none"} + ) yield client # Resetting diff --git a/tests/unit/test_metrics_exporter.py b/tests/unit/test_metrics_exporter.py index 62fb531345..f57984ec66 100644 --- a/tests/unit/test_metrics_exporter.py +++ b/tests/unit/test_metrics_exporter.py @@ -14,6 +14,9 @@ import unittest from unittest.mock import patch, 
MagicMock, Mock + +from google.auth.credentials import AnonymousCredentials + +from google.cloud.spanner_v1.metrics.metrics_exporter import ( CloudMonitoringMetricsExporter, _normalize_label_key, @@ -74,10 +77,6 @@ def setUp(self): unit="counts", ) - def test_default_ctor(self): - exporter = CloudMonitoringMetricsExporter() - self.assertIsNotNone(exporter.project_id) - def test_normalize_label_key(self): """Test label key normalization""" test_cases = [ @@ -236,7 +235,9 @@ def test_metric_timeseries_conversion(self): metrics = self.metric_reader.get_metrics_data() self.assertTrue(metrics is not None) - exporter = CloudMonitoringMetricsExporter(PROJECT_ID) + exporter = CloudMonitoringMetricsExporter( + PROJECT_ID, credentials=AnonymousCredentials() + ) timeseries = exporter._resource_metrics_to_timeseries_pb(metrics) # Both counter values should be summed together @@ -257,7 +258,9 @@ def test_metric_timeseries_scope_filtering(self): # Export metrics metrics = self.metric_reader.get_metrics_data() - exporter = CloudMonitoringMetricsExporter(PROJECT_ID) + exporter = CloudMonitoringMetricsExporter( + PROJECT_ID, credentials=AnonymousCredentials() + ) timeseries = exporter._resource_metrics_to_timeseries_pb(metrics) # Metrics with an incorrect scope should be filtered out @@ -342,7 +345,9 @@ def test_export_early_exit_if_extras_not_installed(self): with self.assertLogs( "google.cloud.spanner_v1.metrics.metrics_exporter", level="WARNING" ) as log: - exporter = CloudMonitoringMetricsExporter(PROJECT_ID) + exporter = CloudMonitoringMetricsExporter( + PROJECT_ID, credentials=AnonymousCredentials() + ) self.assertFalse(exporter.export([])) self.assertIn( "WARNING:google.cloud.spanner_v1.metrics.metrics_exporter:Metric exporter called without dependencies installed.", @@ -382,12 +387,16 @@ def test_export(self): def test_force_flush(self): """Verify that the unimplemented force flush can be called.""" - exporter = CloudMonitoringMetricsExporter(PROJECT_ID) + exporter = CloudMonitoringMetricsExporter( + PROJECT_ID, credentials=AnonymousCredentials() + ) self.assertTrue(exporter.force_flush()) def test_shutdown(self): """Verify that the unimplemented shutdown can be called.""" - exporter = CloudMonitoringMetricsExporter() + exporter = CloudMonitoringMetricsExporter( + project_id="test", credentials=AnonymousCredentials() + ) try: exporter.shutdown() except Exception as e: @@ -409,7 +418,9 @@ def test_metrics_to_time_series_empty_input( self, mocked_data_point_to_timeseries_pb ): """Verify that metric entries with no timeseries data do not return a time series entry.""" - exporter = CloudMonitoringMetricsExporter() + exporter = CloudMonitoringMetricsExporter( + project_id="test", credentials=AnonymousCredentials() + ) data_point = Mock() metric = Mock(data_points=[data_point]) scope_metric = Mock( @@ -422,7 +433,9 @@ def test_metrics_to_time_series_empty_input( def test_to_point(self): """Verify conversion of datapoints.""" - exporter = CloudMonitoringMetricsExporter() + exporter = CloudMonitoringMetricsExporter( + project_id="test", credentials=AnonymousCredentials() + ) number_point = NumberDataPoint( attributes=[], start_time_unix_nano=0, time_unix_nano=0, value=9 diff --git a/tests/unit/test_pool.py b/tests/unit/test_pool.py index a9593b3651..7c643bc0ea 100644 --- a/tests/unit/test_pool.py +++ b/tests/unit/test_pool.py @@ -19,6 +19,12 @@ from datetime import datetime, timedelta import mock +from google.cloud.spanner_v1._helpers import ( + _metadata_with_request_id, + AtomicCounter, +) +from
google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID + from google.cloud.spanner_v1._opentelemetry_tracing import trace_call from tests._helpers import ( OpenTelemetryBase, @@ -255,7 +261,10 @@ def test_spans_bind_get(self): want_span_names = ["CloudSpanner.FixedPool.BatchCreateSessions", "pool.Get"] assert got_span_names == want_span_names - attrs = TestFixedSizePool.BASE_ATTRIBUTES.copy() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id - 1}.{database._channel_id}.{_Database.NTH_REQUEST.value}.1" + attrs = dict( + TestFixedSizePool.BASE_ATTRIBUTES.copy(), x_goog_spanner_request_id=req_id + ) # Check for the overall spans. self.assertSpanAttributes( @@ -283,7 +292,7 @@ def test_spans_bind_get_empty_pool(self): return # Tests trying to invoke pool.get() from an empty pool. - pool = self._make_one(size=0) + pool = self._make_one(size=0, default_timeout=0.1) database = _Database("name") session1 = _Session(database) with trace_call("pool.Get", session1): @@ -922,7 +931,10 @@ def test_spans_put_full(self): want_span_names = ["CloudSpanner.PingingPool.BatchCreateSessions"] assert got_span_names == want_span_names - attrs = TestPingingPool.BASE_ATTRIBUTES.copy() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id - 1}.{database._channel_id}.{_Database.NTH_REQUEST.value}.1" + attrs = dict( + TestPingingPool.BASE_ATTRIBUTES.copy(), x_goog_spanner_request_id=req_id + ) self.assertSpanAttributes( "CloudSpanner.PingingPool.BatchCreateSessions", attributes=attrs, @@ -1193,6 +1205,9 @@ def session_id(self): class _Database(object): + NTH_REQUEST = AtomicCounter() + NTH_CLIENT_ID = AtomicCounter() + def __init__(self, name): self.name = name self._sessions = [] @@ -1247,6 +1262,30 @@ def session(self, **kwargs): def observability_options(self): return dict(db_name=self.name) + @property + def _next_nth_request(self): + return self.NTH_REQUEST.increment() + + @property + def _nth_client_id(self): + return self.NTH_CLIENT_ID.increment() + + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + + @property + def _channel_id(self): + return 1 + class _Queue(object): _size = 1 diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index 8f5f7039b9..010d59e198 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -49,6 +49,11 @@ from google.protobuf.struct_pb2 import Struct, Value from google.cloud.spanner_v1.batch import Batch from google.cloud.spanner_v1 import DefaultTransactionOptions +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID +from google.cloud.spanner_v1._helpers import ( + AtomicCounter, + _metadata_with_request_id, +) def _make_rpc_error(error_cls, trailing_metadata=None): @@ -57,6 +62,40 @@ def _make_rpc_error(error_cls, trailing_metadata=None): return error_cls("error", errors=(grpc_error,)) +NTH_CLIENT_ID = AtomicCounter() + + +def inject_into_mock_database(mockdb): + setattr(mockdb, "_nth_request", AtomicCounter()) + nth_client_id = NTH_CLIENT_ID.increment() + setattr(mockdb, "_nth_client_id", nth_client_id) + channel_id = 1 + setattr(mockdb, "_channel_id", channel_id) + + def metadata_with_request_id( + nth_request, nth_attempt, prior_metadata=[], span=None + ): + nth_req = nth_request.fget(mockdb) + return _metadata_with_request_id( + nth_client_id, + channel_id, + nth_req, + nth_attempt, + prior_metadata, + 
span, + ) + + setattr(mockdb, "metadata_with_request_id", metadata_with_request_id) + + @property + def _next_nth_request(self): + return self._nth_request.increment() + + setattr(mockdb, "_next_nth_request", _next_nth_request) + + return mockdb + + class TestSession(OpenTelemetryBase): PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" @@ -95,6 +134,7 @@ def _make_database( database.database_role = database_role database._route_to_leader_enabled = True database.default_transaction_options = default_transaction_options + inject_into_mock_database(database) return database @@ -186,16 +226,24 @@ def test_create_w_database_role(self): session=session_template, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.create_session.assert_called_once_with( request=request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( - "CloudSpanner.CreateSession", attributes=TestSession.BASE_ATTRIBUTES + "CloudSpanner.CreateSession", + attributes=dict( + TestSession.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_create_session_span_annotations(self): @@ -226,6 +274,10 @@ def test_create_session_span_annotations(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -248,16 +300,24 @@ def test_create_wo_database_role(self): database=database.name, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.create_session.assert_called_once_with( request=request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) self.assertSpanAttributes( - "CloudSpanner.CreateSession", attributes=TestSession.BASE_ATTRIBUTES + "CloudSpanner.CreateSession", + attributes=dict( + TestSession.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_create_ok(self): @@ -276,16 +336,24 @@ def test_create_ok(self): database=database.name, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.create_session.assert_called_once_with( request=request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( - "CloudSpanner.CreateSession", attributes=TestSession.BASE_ATTRIBUTES + "CloudSpanner.CreateSession", + attributes=dict( + TestSession.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_create_w_labels(self): @@ -306,17 +374,24 @@ def test_create_w_labels(self): session=SessionRequestProto(labels=labels), ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.create_session.assert_called_once_with( request=request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( "CloudSpanner.CreateSession", - attributes=dict(TestSession.BASE_ATTRIBUTES, foo="bar"), + attributes=dict( + TestSession.BASE_ATTRIBUTES, foo="bar", x_goog_spanner_request_id=req_id + ), ) def 
test_create_error(self): @@ -329,10 +404,13 @@ def test_create_error(self): with self.assertRaises(Unknown): session.create() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.CreateSession", status=StatusCode.ERROR, - attributes=TestSession.BASE_ATTRIBUTES, + attributes=dict( + TestSession.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_exists_wo_session_id(self): @@ -353,17 +431,26 @@ def test_exists_hit(self): self.assertTrue(session.exists()) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.get_session.assert_called_once_with( name=self.SESSION_NAME, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( "CloudSpanner.GetSession", - attributes=dict(TestSession.BASE_ATTRIBUTES, session_found=True), + attributes=dict( + TestSession.BASE_ATTRIBUTES, + session_found=True, + x_goog_spanner_request_id=req_id, + ), ) @mock.patch( @@ -386,6 +473,10 @@ def test_exists_hit_wo_span(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -401,17 +492,26 @@ def test_exists_miss(self): self.assertFalse(session.exists()) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.get_session.assert_called_once_with( name=self.SESSION_NAME, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( "CloudSpanner.GetSession", - attributes=dict(TestSession.BASE_ATTRIBUTES, session_found=False), + attributes=dict( + TestSession.BASE_ATTRIBUTES, + session_found=False, + x_goog_spanner_request_id=req_id, + ), ) @mock.patch( @@ -433,6 +533,10 @@ def test_exists_miss_wo_span(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -449,18 +553,25 @@ def test_exists_error(self): with self.assertRaises(Unknown): session.exists() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.get_session.assert_called_once_with( name=self.SESSION_NAME, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( "CloudSpanner.GetSession", status=StatusCode.ERROR, - attributes=TestSession.BASE_ATTRIBUTES, + attributes=dict( + TestSession.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_ping_wo_session_id(self): @@ -486,7 +597,13 @@ def test_ping_hit(self): gax_api.execute_sql.assert_called_once_with( request=request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_ping_miss(self): @@ -507,7 +624,13 @@ def test_ping_miss(self): gax_api.execute_sql.assert_called_once_with( request=request, - 
metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_ping_error(self): @@ -528,7 +651,13 @@ def test_ping_error(self): gax_api.execute_sql.assert_called_once_with( request=request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], ) def test_delete_wo_session_id(self): @@ -550,16 +679,23 @@ def test_delete_hit(self): session.delete() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.delete_session.assert_called_once_with( name=self.SESSION_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], ) attrs = {"session.id": session._session_id, "session.name": session.name} attrs.update(TestSession.BASE_ATTRIBUTES) self.assertSpanAttributes( "CloudSpanner.DeleteSession", - attributes=attrs, + attributes=dict(attrs, x_goog_spanner_request_id=req_id), ) def test_delete_miss(self): @@ -573,12 +709,23 @@ def test_delete_miss(self): with self.assertRaises(NotFound): session.delete() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.delete_session.assert_called_once_with( name=self.SESSION_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], ) - attrs = {"session.id": session._session_id, "session.name": session.name} + attrs = { + "session.id": session._session_id, + "session.name": session.name, + "x_goog_spanner_request_id": req_id, + } attrs.update(TestSession.BASE_ATTRIBUTES) self.assertSpanAttributes( @@ -598,12 +745,23 @@ def test_delete_error(self): with self.assertRaises(Unknown): session.delete() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" gax_api.delete_session.assert_called_once_with( name=self.SESSION_NAME, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], ) - attrs = {"session.id": session._session_id, "session.name": session.name} + attrs = { + "session.id": session._session_id, + "session.name": session.name, + "x_goog_spanner_request_id": req_id, + } attrs.update(TestSession.BASE_ATTRIBUTES) self.assertSpanAttributes( @@ -936,6 +1094,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = CommitRequest( @@ -949,6 +1111,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -1000,6 +1166,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), 
("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -1031,7 +1201,9 @@ def unit_of_work(txn, *args, **kw): txn.insert(TABLE_NAME, COLUMNS, VALUES) return "answer" - return_value = session.run_in_transaction(unit_of_work, "abc", some_arg="def") + return_value = session.run_in_transaction( + unit_of_work, "abc", some_arg="def", default_retry_delay=0 + ) self.assertEqual(len(called_with), 2) for index, (txn, args, kw) in enumerate(called_with): @@ -1050,10 +1222,25 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + ), + mock.call( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), ], - ) - ] - * 2, + ), + ], ) request = CommitRequest( session=self.SESSION_NAME, @@ -1069,10 +1256,24 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + ], + ), + mock.call( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.4.1", + ), ], - ) - ] - * 2, + ), + ], ) def test_run_in_transaction_w_abort_w_retry_metadata(self): @@ -1135,10 +1336,25 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + ), + mock.call( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), ], - ) - ] - * 2, + ), + ], ) request = CommitRequest( session=self.SESSION_NAME, @@ -1154,10 +1370,24 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + ], + ), + mock.call( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.4.1", + ), ], - ) - ] - * 2, + ), + ], ) def test_run_in_transaction_w_callback_raises_abort_wo_metadata(self): @@ -1219,6 +1449,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = 
CommitRequest( @@ -1232,6 +1466,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -1295,6 +1533,10 @@ def _time(_results=[1, 1.5]): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = CommitRequest( @@ -1308,6 +1550,10 @@ def _time(_results=[1, 1.5]): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -1367,10 +1613,37 @@ def _time(_results=[1, 2, 4, 8]): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + ), + mock.call( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), + ], + ), + mock.call( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.5.1", + ), ], - ) - ] - * 3, + ), + ], ) request = CommitRequest( session=self.SESSION_NAME, @@ -1386,10 +1659,35 @@ def _time(_results=[1, 2, 4, 8]): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], - ) - ] - * 3, + ), + mock.call( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.4.1", + ), + ], + ), + mock.call( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.6.1", + ), + ], + ), + ], ) def test_run_in_transaction_w_commit_stats_success(self): @@ -1438,6 +1736,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = CommitRequest( @@ -1452,6 +1754,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) database.logger.info.assert_called_once_with( @@ -1500,6 +1806,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ 
("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = CommitRequest( @@ -1514,6 +1824,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) database.logger.info.assert_not_called() @@ -1566,6 +1880,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = CommitRequest( @@ -1579,6 +1897,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -1631,6 +1953,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) request = CommitRequest( @@ -1644,6 +1970,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -1717,10 +2047,25 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + ), + mock.call( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), ], - ) - ] - * 2, + ), + ], ) request = CommitRequest( session=self.SESSION_NAME, @@ -1736,10 +2081,24 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], - ) - ] - * 2, + ), + mock.call( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.4.1", + ), + ], + ), + ], ) def test_run_in_transaction_w_isolation_level_at_request(self): @@ -1771,6 +2130,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -1805,6 +2168,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), 
("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -1843,6 +2210,10 @@ def unit_of_work(txn, *args, **kw): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -1858,7 +2229,7 @@ def _time_func(): # check if current time > deadline with mock.patch("time.time", _time_func): with self.assertRaises(Exception): - _delay_until_retry(exc_mock, 2, 1) + _delay_until_retry(exc_mock, 2, 1, default_retry_delay=0) with mock.patch("time.time", _time_func): with mock.patch( diff --git a/tests/unit/test_snapshot.py b/tests/unit/test_snapshot.py index 11fc0135d1..bb0db5db0f 100644 --- a/tests/unit/test_snapshot.py +++ b/tests/unit/test_snapshot.py @@ -24,7 +24,12 @@ HAS_OPENTELEMETRY_INSTALLED, enrich_with_otel_scope, ) +from google.cloud.spanner_v1._helpers import ( + _metadata_with_request_id, + AtomicCounter, +) from google.cloud.spanner_v1.param_types import INT64 +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID from google.api_core.retry import Retry TABLE_NAME = "citizens" @@ -135,6 +140,7 @@ def _call_fut( session, attributes, transaction=derived, + request_id_manager=None if not session else session._database, ) def _make_item(self, value, resume_token=b"", metadata=None): @@ -153,9 +159,17 @@ def test_iteration_w_empty_raw(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), []) - restart.assert_called_once_with(request=request, metadata=None) + restart.assert_called_once_with( + request=request, + metadata=[ + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ) + ], + ) self.assertNoSpans() def test_iteration_w_non_empty_raw(self): @@ -167,9 +181,17 @@ def test_iteration_w_non_empty_raw(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(ITEMS)) - restart.assert_called_once_with(request=request, metadata=None) + restart.assert_called_once_with( + request=request, + metadata=[ + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ) + ], + ) self.assertNoSpans() def test_iteration_w_raw_w_resume_tken(self): @@ -186,9 +208,17 @@ def test_iteration_w_raw_w_resume_tken(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(ITEMS)) - restart.assert_called_once_with(request=request, metadata=None) + restart.assert_called_once_with( + request=request, + metadata=[ + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ) + ], + ) self.assertNoSpans() def 
test_iteration_w_raw_raising_unavailable_no_token(self): @@ -207,7 +237,7 @@ def test_iteration_w_raw_raising_unavailable_no_token(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(ITEMS)) self.assertEqual(len(restart.mock_calls), 2) self.assertEqual(request.resume_token, b"") @@ -234,7 +264,7 @@ def test_iteration_w_raw_raising_retryable_internal_error_no_token(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(ITEMS)) self.assertEqual(len(restart.mock_calls), 2) self.assertEqual(request.resume_token, b"") @@ -256,10 +286,18 @@ def test_iteration_w_raw_raising_non_retryable_internal_error_no_token(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) with self.assertRaises(InternalServerError): list(resumable) - restart.assert_called_once_with(request=request, metadata=None) + restart.assert_called_once_with( + request=request, + metadata=[ + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ) + ], + ) self.assertNoSpans() def test_iteration_w_raw_raising_unavailable(self): @@ -278,7 +316,7 @@ def test_iteration_w_raw_raising_unavailable(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(FIRST + LAST)) self.assertEqual(len(restart.mock_calls), 2) self.assertEqual(request.resume_token, RESUME_TOKEN) @@ -295,7 +333,7 @@ def test_iteration_w_raw_raising_retryable_internal_error(self): fail_after=True, error=InternalServerError( "Received unexpected EOS on DATA frame from server" - ) + ), ) after = _MockIterator(*LAST) request = mock.Mock(test="test", spec=["test", "resume_token"]) @@ -304,7 +342,7 @@ def test_iteration_w_raw_raising_retryable_internal_error(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(FIRST + LAST)) self.assertEqual(len(restart.mock_calls), 2) self.assertEqual(request.resume_token, RESUME_TOKEN) @@ -326,10 +364,18 @@ def test_iteration_w_raw_raising_non_retryable_internal_error(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) with self.assertRaises(InternalServerError): list(resumable) - restart.assert_called_once_with(request=request, metadata=None) + restart.assert_called_once_with( + request=request, + metadata=[ + ( + "x-goog-spanner-request-id", + 
f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ) + ], + ) self.assertNoSpans() def test_iteration_w_raw_raising_unavailable_after_token(self): @@ -347,7 +393,7 @@ def test_iteration_w_raw_raising_unavailable_after_token(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(FIRST + SECOND)) self.assertEqual(len(restart.mock_calls), 2) self.assertEqual(request.resume_token, RESUME_TOKEN) @@ -370,7 +416,7 @@ def test_iteration_w_raw_w_multiuse(self): session = _Session(database) derived = self._makeDerived(session) derived._multi_use = True - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(FIRST)) self.assertEqual(len(restart.mock_calls), 1) begin_count = sum( @@ -401,7 +447,7 @@ def test_iteration_w_raw_raising_unavailable_w_multiuse(self): session = _Session(database) derived = self._makeDerived(session) derived._multi_use = True - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(SECOND)) self.assertEqual(len(restart.mock_calls), 2) begin_count = sum( @@ -440,7 +486,7 @@ def test_iteration_w_raw_raising_unavailable_after_token_w_multiuse(self): derived = self._makeDerived(session) derived._multi_use = True - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(FIRST + SECOND)) self.assertEqual(len(restart.mock_calls), 2) @@ -467,7 +513,7 @@ def test_iteration_w_raw_raising_retryable_internal_error_after_token(self): fail_after=True, error=InternalServerError( "Received unexpected EOS on DATA frame from server" - ) + ), ) after = _MockIterator(*SECOND) request = mock.Mock(test="test", spec=["test", "resume_token"]) @@ -476,7 +522,7 @@ def test_iteration_w_raw_raising_retryable_internal_error_after_token(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) self.assertEqual(list(resumable), list(FIRST + SECOND)) self.assertEqual(len(restart.mock_calls), 2) self.assertEqual(request.resume_token, RESUME_TOKEN) @@ -497,10 +543,18 @@ def test_iteration_w_raw_raising_non_retryable_internal_error_after_token(self): database.spanner_api = self._make_spanner_api() session = _Session(database) derived = self._makeDerived(session) - resumable = self._call_fut(derived, restart, request) + resumable = self._call_fut(derived, restart, request, session=session) with self.assertRaises(InternalServerError): list(resumable) - restart.assert_called_once_with(request=request, metadata=None) + restart.assert_called_once_with( + request=request, + metadata=[ + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ) + ], + ) self.assertNoSpans() def test_iteration_w_span_creation(self): @@ -517,7 +571,13 @@ def test_iteration_w_span_creation(self): derived, restart, request, name, _Session(_Database()), extra_atts ) self.assertEqual(list(resumable), 
[]) - self.assertSpanAttributes(name, attributes=dict(BASE_ATTRIBUTES, test_att=1)) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" + self.assertSpanAttributes( + name, + attributes=dict( + BASE_ATTRIBUTES, test_att=1, x_goog_spanner_request_id=req_id + ), + ) def test_iteration_w_multiple_span_creation(self): from google.api_core.exceptions import ServiceUnavailable @@ -546,11 +606,15 @@ def test_iteration_w_multiple_span_creation(self): span_list = self.ot_exporter.get_finished_spans() self.assertEqual(len(span_list), 2) - for span in span_list: + for i, span in enumerate(span_list): self.assertEqual(span.name, name) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.{i + 1}" self.assertEqual( dict(span.attributes), - enrich_with_otel_scope(BASE_ATTRIBUTES), + dict( + enrich_with_otel_scope(BASE_ATTRIBUTES), + x_goog_spanner_request_id=req_id, + ), ) @@ -625,11 +689,15 @@ def test_read_other_error(self): with self.assertRaises(RuntimeError): list(derived.read(TABLE_NAME, COLUMNS, keyset)) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner._Derived.read", status=StatusCode.ERROR, attributes=dict( - BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS) + BASE_ATTRIBUTES, + table_id=TABLE_NAME, + columns=tuple(COLUMNS), + x_goog_spanner_request_id=req_id, ), ) @@ -775,9 +843,16 @@ def _read_helper( request_options=expected_request_options, directed_read_options=expected_directed_read_options, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" api.streaming_read.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], retry=retry, timeout=timeout, ) @@ -785,7 +860,10 @@ def _read_helper( self.assertSpanAttributes( "CloudSpanner._Derived.read", attributes=dict( - BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS) + BASE_ATTRIBUTES, + table_id=TABLE_NAME, + columns=tuple(COLUMNS), + x_goog_spanner_request_id=req_id, ), ) @@ -877,10 +955,14 @@ def test_execute_sql_other_error(self): self.assertEqual(derived._execute_sql_count, 1) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner._Derived.execute_sql", status=StatusCode.ERROR, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY}), + attributes=dict( + BASE_ATTRIBUTES, + **{"db.statement": SQL_QUERY, "x_goog_spanner_request_id": req_id}, + ), ) def _execute_sql_helper( @@ -1024,9 +1106,16 @@ def _execute_sql_helper( seqno=sql_count, directed_read_options=expected_directed_read_options, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" api.execute_streaming_sql.assert_called_once_with( request=expected_request, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], timeout=timeout, retry=retry, ) @@ -1036,7 +1125,13 @@ def _execute_sql_helper( self.assertSpanAttributes( "CloudSpanner._Derived.execute_sql", status=StatusCode.OK, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}), + attributes=dict( + BASE_ATTRIBUTES, + **{ + "db.statement": SQL_QUERY_WITH_PARAM, + 
"x_goog_spanner_request_id": req_id, + }, + ), ) def test_execute_sql_wo_multi_use(self): @@ -1194,11 +1289,16 @@ def _partition_read_helper( index=index, partition_options=expected_partition_options, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" api.partition_read.assert_called_once_with( request=expected_request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], retry=retry, timeout=timeout, @@ -1208,6 +1308,7 @@ def _partition_read_helper( BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS), + x_goog_spanner_request_id=req_id, ) if index: want_span_attributes["index"] = index @@ -1240,11 +1341,15 @@ def test_partition_read_other_error(self): with self.assertRaises(RuntimeError): list(derived.partition_read(TABLE_NAME, COLUMNS, keyset)) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner._Derived.partition_read", status=StatusCode.ERROR, attributes=dict( - BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS) + BASE_ATTRIBUTES, + table_id=TABLE_NAME, + columns=tuple(COLUMNS), + x_goog_spanner_request_id=req_id, ), ) @@ -1373,11 +1478,16 @@ def _partition_query_helper( param_types=PARAM_TYPES, partition_options=expected_partition_options, ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" api.partition_query.assert_called_once_with( request=expected_request, metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], retry=retry, timeout=timeout, @@ -1386,7 +1496,13 @@ def _partition_query_helper( self.assertSpanAttributes( "CloudSpanner._Derived.partition_query", status=StatusCode.OK, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}), + attributes=dict( + BASE_ATTRIBUTES, + **{ + "db.statement": SQL_QUERY_WITH_PARAM, + "x_goog_spanner_request_id": req_id, + }, + ), ) def test_partition_query_other_error(self): @@ -1401,10 +1517,14 @@ def test_partition_query_other_error(self): with self.assertRaises(RuntimeError): list(derived.partition_query(SQL_QUERY)) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner._Derived.partition_query", status=StatusCode.ERROR, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY}), + attributes=dict( + BASE_ATTRIBUTES, + **{"db.statement": SQL_QUERY, "x_goog_spanner_request_id": req_id}, + ), ) def test_partition_query_single_use_raises(self): @@ -1719,10 +1839,11 @@ def test_begin_w_other_error(self): want_span_names = ["CloudSpanner.Snapshot.begin"] assert got_span_names == want_span_names + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.Snapshot.begin", status=StatusCode.ERROR, - attributes=BASE_ATTRIBUTES, + attributes=dict(BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id), ) def test_begin_w_retry(self): @@ -1771,16 +1892,23 @@ def test_begin_ok_exact_staleness(self): ) ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" api.begin_transaction.assert_called_once_with( session=session.name, options=expected_txn_options, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", 
database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], ) self.assertSpanAttributes( "CloudSpanner.Snapshot.begin", status=StatusCode.OK, - attributes=BASE_ATTRIBUTES, + attributes=dict(BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id), ) def test_begin_ok_exact_strong(self): @@ -1807,24 +1935,39 @@ def test_begin_ok_exact_strong(self): ) ) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" api.begin_transaction.assert_called_once_with( session=session.name, options=expected_txn_options, - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + req_id, + ), + ], ) self.assertSpanAttributes( "CloudSpanner.Snapshot.begin", status=StatusCode.OK, - attributes=BASE_ATTRIBUTES, + attributes=dict(BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id), ) class _Client(object): + NTH_CLIENT = AtomicCounter() + def __init__(self): from google.cloud.spanner_v1 import ExecuteSqlRequest self._query_options = ExecuteSqlRequest.QueryOptions(optimizer_version="1") + self._nth_client_id = _Client.NTH_CLIENT.increment() + self._nth_request = AtomicCounter() + + @property + def _next_nth_request(self): + return self._nth_request.increment() class _Instance(object): @@ -1835,6 +1978,7 @@ def __init__(self): class _Database(object): def __init__(self, directed_read_options=None): self.name = "testing" + self._nth_request = 0 self._instance = _Instance() self._route_to_leader_enabled = True self._directed_read_options = directed_read_options @@ -1843,6 +1987,31 @@ def __init__(self, directed_read_options=None): def observability_options(self): return dict(db_name=self.name) + @property + def _next_nth_request(self): + self._nth_request += 1 + return self._nth_request + + @property + def _nth_client_id(self): + return 1 + + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + + @property + def _channel_id(self): + return 1 + class _Session(object): def __init__(self, database=None, name=TestSnapshot.SESSION_NAME): diff --git a/tests/unit/test_spanner.py b/tests/unit/test_spanner.py index 8bd95c7228..4acd7d3798 100644 --- a/tests/unit/test_spanner.py +++ b/tests/unit/test_spanner.py @@ -38,10 +38,12 @@ from google.cloud.spanner_v1.keyset import KeySet from google.cloud.spanner_v1._helpers import ( + AtomicCounter, _make_value_pb, _merge_query_options, + _metadata_with_request_id, ) - +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID import mock from google.api_core import gapic_v1 @@ -522,6 +524,10 @@ def test_transaction_should_include_begin_with_first_update(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.1.1.1", + ), ], ) @@ -537,6 +543,10 @@ def test_transaction_should_include_begin_with_first_query(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], timeout=TIMEOUT, retry=RETRY, @@ -554,6 +564,10 @@ def test_transaction_should_include_begin_with_first_read(self): metadata=[ ("google-cloud-resource-prefix", 
database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -570,6 +584,10 @@ def test_transaction_should_include_begin_with_first_batch_update(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -595,6 +613,10 @@ def test_transaction_should_include_begin_w_exclude_txn_from_change_streams_with metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -621,6 +643,10 @@ def test_transaction_should_include_begin_w_isolation_level_with_first_update( metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -639,6 +665,10 @@ def test_transaction_should_use_transaction_id_if_error_with_first_batch_update( metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -653,6 +683,10 @@ def test_transaction_should_use_transaction_id_if_error_with_first_batch_update( metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -669,6 +703,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_query(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -682,6 +720,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_query(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -698,6 +740,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_update(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -711,6 +757,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_update(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -732,6 +782,10 @@ def test_transaction_execute_sql_w_directed_read_options(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + 
f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, @@ -755,6 +809,10 @@ def test_transaction_streaming_read_w_directed_read_options(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -771,6 +829,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_read(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -782,6 +844,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_read(self): metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -798,6 +864,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_batch_update(se metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -810,6 +880,10 @@ def test_transaction_should_use_transaction_id_returned_by_first_batch_update(se metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -850,6 +924,10 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), ], ) @@ -860,6 +938,10 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), ], ) @@ -868,6 +950,10 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), ], retry=RETRY, timeout=TIMEOUT, @@ -903,6 +989,7 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ thread.join() self._execute_update_helper(transaction=transaction, api=api) + self.assertEqual(api.execute_sql.call_count, 1) api.execute_sql.assert_any_call( request=self._execute_update_expected_request(database, begin=False), @@ -911,32 +998,46 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + 
f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.3.1", + ), ], ) - api.execute_batch_dml.assert_any_call( - request=self._batch_update_expected_request(), - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), - ], - retry=RETRY, - timeout=TIMEOUT, - ) - - api.execute_batch_dml.assert_any_call( - request=self._batch_update_expected_request(begin=False), - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), + self.assertEqual(api.execute_batch_dml.call_count, 2) + self.assertEqual( + api.execute_batch_dml.call_args_list, + [ + mock.call( + request=self._batch_update_expected_request(), + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + retry=RETRY, + timeout=TIMEOUT, + ), + mock.call( + request=self._batch_update_expected_request(begin=False), + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + ], + retry=RETRY, + timeout=TIMEOUT, + ), ], - retry=RETRY, - timeout=TIMEOUT, ) - self.assertEqual(api.execute_sql.call_count, 1) - self.assertEqual(api.execute_batch_dml.call_count, 2) - def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_read( self, ): @@ -977,27 +1078,43 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.1.3.1", + ), ], ) - api.streaming_read.assert_any_call( - request=self._read_helper_expected_request(), - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), - ], - retry=RETRY, - timeout=TIMEOUT, - ) - - api.streaming_read.assert_any_call( - request=self._read_helper_expected_request(begin=False), - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), + self.assertEqual( + api.streaming_read.call_args_list, + [ + mock.call( + request=self._read_helper_expected_request(), + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + retry=RETRY, + timeout=TIMEOUT, + ), + mock.call( + request=self._read_helper_expected_request(begin=False), + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + ], + retry=RETRY, + timeout=TIMEOUT, + ), ], - retry=RETRY, - timeout=TIMEOUT, ) self.assertEqual(api.execute_sql.call_count, 1) @@ -1043,27 +1160,43 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_ metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.1.3.1", + ), ], ) - req = 
self._execute_sql_expected_request(database) - api.execute_streaming_sql.assert_any_call( - request=req, - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), - ], - retry=RETRY, - timeout=TIMEOUT, - ) - api.execute_streaming_sql.assert_any_call( - request=self._execute_sql_expected_request(database, begin=False), - metadata=[ - ("google-cloud-resource-prefix", database.name), - ("x-goog-spanner-route-to-leader", "true"), + self.assertEqual( + api.execute_streaming_sql.call_args_list, + [ + mock.call( + request=self._execute_sql_expected_request(database), + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], + retry=RETRY, + timeout=TIMEOUT, + ), + mock.call( + request=self._execute_sql_expected_request(database, begin=False), + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1", + ), + ], + retry=RETRY, + timeout=TIMEOUT, + ), ], - retry=RETRY, - timeout=TIMEOUT, ) self.assertEqual(api.execute_sql.call_count, 1) @@ -1079,19 +1212,33 @@ def test_transaction_should_execute_sql_with_route_to_leader_disabled(self): api.execute_streaming_sql.assert_called_once_with( request=self._execute_sql_expected_request(database=database), - metadata=[("google-cloud-resource-prefix", database.name)], + metadata=[ + ("google-cloud-resource-prefix", database.name), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1", + ), + ], timeout=TIMEOUT, retry=RETRY, ) class _Client(object): + NTH_CLIENT = AtomicCounter() + def __init__(self): from google.cloud.spanner_v1 import ExecuteSqlRequest self._query_options = ExecuteSqlRequest.QueryOptions(optimizer_version="1") self.directed_read_options = None self.default_transaction_options = DefaultTransactionOptions() + self._nth_client_id = _Client.NTH_CLIENT.increment() + self._nth_request = AtomicCounter() + + @property + def _next_nth_request(self): + return self._nth_request.increment() class _Instance(object): @@ -1107,6 +1254,30 @@ def __init__(self): self._directed_read_options = None self.default_transaction_options = DefaultTransactionOptions() + @property + def _next_nth_request(self): + return self._instance._client._next_nth_request + + @property + def _nth_client_id(self): + return self._instance._client._nth_client_id + + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + + @property + def _channel_id(self): + return 1 + class _Session(object): _transaction = None diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py index ddc91ea522..e477ef27c6 100644 --- a/tests/unit/test_transaction.py +++ b/tests/unit/test_transaction.py @@ -21,6 +21,11 @@ from google.cloud.spanner_v1 import TypeCode from google.api_core.retry import Retry from google.api_core import gapic_v1 +from google.cloud.spanner_v1._helpers import ( + AtomicCounter, + _metadata_with_request_id, +) +from google.cloud.spanner_v1.request_id_header import REQ_RAND_PROCESS_ID from tests._helpers import ( 
HAS_OPENTELEMETRY_INSTALLED, @@ -167,10 +172,13 @@ def test_begin_w_other_error(self): with self.assertRaises(RuntimeError): transaction.begin() + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.Transaction.begin", status=StatusCode.ERROR, - attributes=TestTransaction.BASE_ATTRIBUTES, + attributes=dict( + TestTransaction.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_begin_ok(self): @@ -192,16 +200,24 @@ def test_begin_ok(self): session_id, txn_options, metadata = api._begun self.assertEqual(session_id, session.name) self.assertTrue(type(txn_options).pb(txn_options).HasField("read_write")) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{_Client.NTH_CLIENT.value}.1.1.1" self.assertEqual( metadata, [ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( - "CloudSpanner.Transaction.begin", attributes=TestTransaction.BASE_ATTRIBUTES + "CloudSpanner.Transaction.begin", + attributes=dict( + TestTransaction.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_begin_w_retry(self): @@ -271,10 +287,13 @@ def test_rollback_w_other_error(self): self.assertFalse(transaction.rolled_back) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertSpanAttributes( "CloudSpanner.Transaction.rollback", status=StatusCode.ERROR, - attributes=TestTransaction.BASE_ATTRIBUTES, + attributes=dict( + TestTransaction.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_rollback_ok(self): @@ -296,17 +315,24 @@ def test_rollback_ok(self): session_id, txn_id, metadata = api._rolled_back self.assertEqual(session_id, session.name) self.assertEqual(txn_id, self.TRANSACTION_ID) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertEqual( metadata, [ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertSpanAttributes( "CloudSpanner.Transaction.rollback", - attributes=TestTransaction.BASE_ATTRIBUTES, + attributes=dict( + TestTransaction.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id + ), ) def test_commit_not_begun(self): @@ -417,10 +443,15 @@ def test_commit_w_other_error(self): self.assertIsNone(transaction.committed) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{_Client.NTH_CLIENT.value}.1.1.1" self.assertSpanAttributes( "CloudSpanner.Transaction.commit", status=StatusCode.ERROR, - attributes=dict(TestTransaction.BASE_ATTRIBUTES, num_mutations=1), + attributes=dict( + TestTransaction.BASE_ATTRIBUTES, + num_mutations=1, + x_goog_spanner_request_id=req_id, + ), ) def _commit_helper( @@ -487,11 +518,16 @@ def _commit_helper( self.assertEqual(session_id, session.name) self.assertEqual(txn_id, self.TRANSACTION_ID) self.assertEqual(mutations, transaction._mutations) + req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1" self.assertEqual( metadata, [ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + req_id, + ), ], ) self.assertEqual(actual_request_options, expected_request_options) @@ -504,6 +540,7 @@ def _commit_helper( attributes=dict( TestTransaction.BASE_ATTRIBUTES, num_mutations=len(transaction._mutations), + x_goog_spanner_request_id=req_id, ), ) @@ -666,6 +703,10 @@ def 
_execute_update_helper( metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{_Client.NTH_CLIENT.value}.1.1.1", + ), ], ) @@ -859,6 +900,10 @@ def _batch_update_helper( metadata=[ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{_Client.NTH_CLIENT.value}.1.1.1", + ), ], retry=retry, timeout=timeout, @@ -974,6 +1019,10 @@ def test_context_mgr_success(self): [ ("google-cloud-resource-prefix", database.name), ("x-goog-spanner-route-to-leader", "true"), + ( + "x-goog-spanner-request-id", + f"1.{REQ_RAND_PROCESS_ID}.{_Client.NTH_CLIENT.value}.1.2.1", + ), ], ) @@ -1004,11 +1053,19 @@ def test_context_mgr_failure(self): class _Client(object): + NTH_CLIENT = AtomicCounter() + def __init__(self): from google.cloud.spanner_v1 import ExecuteSqlRequest self._query_options = ExecuteSqlRequest.QueryOptions(optimizer_version="1") self.directed_read_options = None + self._nth_client_id = _Client.NTH_CLIENT.increment() + self._nth_request = AtomicCounter() + + @property + def _next_nth_request(self): + return self._nth_request.increment() class _Instance(object): @@ -1024,6 +1081,30 @@ def __init__(self): self._directed_read_options = None self.default_transaction_options = DefaultTransactionOptions() + @property + def _next_nth_request(self): + return self._instance._client._next_nth_request + + @property + def _nth_client_id(self): + return self._instance._client._nth_client_id + + def metadata_with_request_id( + self, nth_request, nth_attempt, prior_metadata=[], span=None + ): + return _metadata_with_request_id( + self._nth_client_id, + self._channel_id, + nth_request, + nth_attempt, + prior_metadata, + span, + ) + + @property + def _channel_id(self): + return 1 + class _Session(object): _transaction = None pFad - Phonifier reborn
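Editor's note: every assertion added in this patch expects the "x-goog-spanner-request-id" metadata value to follow the dotted format 1.<rand_process_id>.<nth_client_id>.<channel_id>.<nth_request>.<nth_attempt>, and the test doubles expose _next_nth_request / metadata_with_request_id to drive those components. The following is a minimal, self-contained sketch of how such a header value can be composed; the helper names and the random-seed choice here are illustrative assumptions, not the library's confirmed implementation.

# Sketch only: mirrors the request-id format asserted in the tests above.
import random
import threading


class AtomicCounter:
    """Thread-safe counter, analogous to the AtomicCounter the tests import."""

    def __init__(self, start_value=0):
        self._lock = threading.Lock()
        self._value = start_value

    def increment(self):
        with self._lock:
            self._value += 1
            return self._value

    @property
    def value(self):
        return self._value


# Assumption: one random component is generated per process, like REQ_RAND_PROCESS_ID.
REQ_RAND_PROCESS_ID = random.randint(0, 2**32 - 1)


def build_request_id(nth_client_id, channel_id, nth_request, nth_attempt):
    # Format asserted throughout this patch:
    # 1.<process-rand>.<nth-client>.<channel>.<nth-request>.<attempt>
    return (
        f"1.{REQ_RAND_PROCESS_ID}.{nth_client_id}.{channel_id}"
        f".{nth_request}.{nth_attempt}"
    )


nth_request = AtomicCounter()
# First request on client 1, channel 1, attempt 1 ends in "...1.1",
# matching the "...{_channel_id}.1.1" expectations in the tests.
print(build_request_id(1, 1, nth_request.increment(), 1))

The per-client and per-request counters explain why retried or later RPCs in the tests assert suffixes such as "...2.1", "...3.1", and "...4.1": the request ordinal increases with each new RPC while the attempt component stays at 1 unless the same request is retried.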