From 00ccb7a5c1f246b5099265058a5e9875e6627024 Mon Sep 17 00:00:00 2001 From: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> Date: Thu, 20 Jun 2024 13:21:24 +0530 Subject: [PATCH 01/10] feat(spanner): add support for txn change stream exclusion (#1152) * feat(spanner): add support for txn change stream exclusion * feat(spanner): add tests for txn change streams exclusion * chore(spanner): lint fix * feat(spanner): add docs * feat(spanner): add test for ILB with change stream exclusion * feat(spanner): update default value and add optional --- google/cloud/spanner_v1/batch.py | 21 ++- google/cloud/spanner_v1/database.py | 43 +++++- google/cloud/spanner_v1/session.py | 8 ++ google/cloud/spanner_v1/transaction.py | 11 +- tests/unit/test_batch.py | 42 +++++- tests/unit/test_database.py | 16 ++- tests/unit/test_session.py | 185 +++++++++++++++++++++++++ tests/unit/test_spanner.py | 37 ++++- 8 files changed, 346 insertions(+), 17 deletions(-) diff --git a/google/cloud/spanner_v1/batch.py b/google/cloud/spanner_v1/batch.py index 9cb2afbc2c..e3d681189c 100644 --- a/google/cloud/spanner_v1/batch.py +++ b/google/cloud/spanner_v1/batch.py @@ -147,7 +147,11 @@ def _check_state(self): raise ValueError("Batch already committed") def commit( - self, return_commit_stats=False, request_options=None, max_commit_delay=None + self, + return_commit_stats=False, + request_options=None, + max_commit_delay=None, + exclude_txn_from_change_streams=False, ): """Commit mutations to the database. @@ -178,7 +182,10 @@ def commit( metadata.append( _metadata_with_leader_aware_routing(database._route_to_leader_enabled) ) - txn_options = TransactionOptions(read_write=TransactionOptions.ReadWrite()) + txn_options = TransactionOptions( + read_write=TransactionOptions.ReadWrite(), + exclude_txn_from_change_streams=exclude_txn_from_change_streams, + ) trace_attributes = {"num_mutations": len(self._mutations)} if request_options is None: @@ -270,7 +277,7 @@ def group(self): self._mutation_groups.append(mutation_group) return MutationGroup(self._session, mutation_group.mutations) - def batch_write(self, request_options=None): + def batch_write(self, request_options=None, exclude_txn_from_change_streams=False): """Executes batch_write. :type request_options: :class:`google.cloud.spanner_v1.types.RequestOptions` :param request_options: (Optional) Common options for this request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.spanner_v1.types.RequestOptions`. + :type exclude_txn_from_change_streams: bool + :param exclude_txn_from_change_streams: + (Optional) If true, instructs the transaction to be excluded from being recorded in change streams + with the DDL option `allow_txn_exclusion=true`. This does not exclude the transaction from + being recorded in the change streams with the DDL option `allow_txn_exclusion` being false or + unset. + :rtype: :class:`Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]` :returns: a sequence of responses for each batch. 
""" @@ -302,6 +316,7 @@ def batch_write(self, request_options=None): session=self._session.name, mutation_groups=self._mutation_groups, request_options=request_options, + exclude_txn_from_change_streams=exclude_txn_from_change_streams, ) with trace_call("CloudSpanner.BatchWrite", self._session, trace_attributes): method = functools.partial( diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index 356bec413c..5b7c27b236 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -619,6 +619,7 @@ def execute_partitioned_dml( param_types=None, query_options=None, request_options=None, + exclude_txn_from_change_streams=False, ): """Execute a partitionable DML statement. @@ -651,6 +652,13 @@ def execute_partitioned_dml( Please note, the `transactionTag` setting will be ignored as it is not supported for partitioned DML. + :type exclude_txn_from_change_streams: bool + :param exclude_txn_from_change_streams: + (Optional) If true, instructs the transaction to be excluded from being recorded in change streams + with the DDL option `allow_txn_exclusion=true`. This does not exclude the transaction from + being recorded in the change streams with the DDL option `allow_txn_exclusion` being false or + unset. + :rtype: int :returns: Count of rows affected by the DML statement. """ @@ -673,7 +681,8 @@ def execute_partitioned_dml( api = self.spanner_api txn_options = TransactionOptions( - partitioned_dml=TransactionOptions.PartitionedDml() + partitioned_dml=TransactionOptions.PartitionedDml(), + exclude_txn_from_change_streams=exclude_txn_from_change_streams, ) metadata = _metadata_with_prefix(self.name) @@ -752,7 +761,12 @@ def snapshot(self, **kw): """ return SnapshotCheckout(self, **kw) - def batch(self, request_options=None, max_commit_delay=None): + def batch( + self, + request_options=None, + max_commit_delay=None, + exclude_txn_from_change_streams=False, + ): """Return an object which wraps a batch. The wrapper *must* be used as a context manager, with the batch @@ -771,10 +785,19 @@ def batch(self, request_options=None, max_commit_delay=None): in order to improve throughput. Value must be between 0ms and 500ms. + :type exclude_txn_from_change_streams: bool + :param exclude_txn_from_change_streams: + (Optional) If true, instructs the transaction to be excluded from being recorded in change streams + with the DDL option `allow_txn_exclusion=true`. This does not exclude the transaction from + being recorded in the change streams with the DDL option `allow_txn_exclusion` being false or + unset. + :rtype: :class:`~google.cloud.spanner_v1.database.BatchCheckout` :returns: new wrapper """ - return BatchCheckout(self, request_options, max_commit_delay) + return BatchCheckout( + self, request_options, max_commit_delay, exclude_txn_from_change_streams + ) def mutation_groups(self): """Return an object which wraps a mutation_group. @@ -840,6 +863,10 @@ def run_in_transaction(self, func, *args, **kw): "max_commit_delay" will be removed and used to set the max_commit_delay for the request. Value must be between 0ms and 500ms. + "exclude_txn_from_change_streams" if true, instructs the transaction to be excluded + from being recorded in change streams with the DDL option `allow_txn_exclusion=true`. + This does not exclude the transaction from being recorded in the change streams with + the DDL option `allow_txn_exclusion` being false or unset. :rtype: Any :returns: The return value of ``func``. 
@@ -1103,7 +1130,13 @@ class BatchCheckout(object): in order to improve throughput. """ - def __init__(self, database, request_options=None, max_commit_delay=None): + def __init__( + self, + database, + request_options=None, + max_commit_delay=None, + exclude_txn_from_change_streams=False, + ): self._database = database self._session = self._batch = None if request_options is None: @@ -1113,6 +1146,7 @@ def __init__(self, database, request_options=None, max_commit_delay=None): else: self._request_options = request_options self._max_commit_delay = max_commit_delay + self._exclude_txn_from_change_streams = exclude_txn_from_change_streams def __enter__(self): """Begin ``with`` block.""" @@ -1130,6 +1164,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): return_commit_stats=self._database.log_commit_stats, request_options=self._request_options, max_commit_delay=self._max_commit_delay, + exclude_txn_from_change_streams=self._exclude_txn_from_change_streams, ) finally: if self._database.log_commit_stats and self._batch.commit_stats: diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index 52994e58e2..28280282f4 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -387,6 +387,10 @@ def run_in_transaction(self, func, *args, **kw): request options for the commit request. "max_commit_delay" will be removed and used to set the max commit delay for the request. "transaction_tag" will be removed and used to set the transaction tag for the request. + "exclude_txn_from_change_streams" if true, instructs the transaction to be excluded + from being recorded in change streams with the DDL option `allow_txn_exclusion=true`. + This does not exclude the transaction from being recorded in the change streams with + the DDL option `allow_txn_exclusion` being false or unset. :rtype: Any :returns: The return value of ``func``. 
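As the session.py docstring above spells out, run_in_transaction takes the flag through **kw; the session pops it before calling the user's unit of work, so existing callbacks keep their signatures. A sketch under the same placeholder schema as the earlier example:

    def update_singer(transaction):
        # Runs inside the retried read-write transaction. The
        # exclude_txn_from_change_streams kwarg was popped by the session,
        # so it is not forwarded to this callback.
        transaction.execute_update(
            "UPDATE Singers SET FirstName = 'Marco' WHERE SingerId = 1"
        )

    database.run_in_transaction(update_singer, exclude_txn_from_change_streams=True)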
@@ -398,12 +402,16 @@ def run_in_transaction(self, func, *args, **kw): commit_request_options = kw.pop("commit_request_options", None) max_commit_delay = kw.pop("max_commit_delay", None) transaction_tag = kw.pop("transaction_tag", None) + exclude_txn_from_change_streams = kw.pop( + "exclude_txn_from_change_streams", None + ) attempts = 0 while True: if self._transaction is None: txn = self.transaction() txn.transaction_tag = transaction_tag + txn.exclude_txn_from_change_streams = exclude_txn_from_change_streams else: txn = self._transaction diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py index b02a43e8d2..ee1dd8ef3b 100644 --- a/google/cloud/spanner_v1/transaction.py +++ b/google/cloud/spanner_v1/transaction.py @@ -55,6 +55,7 @@ class Transaction(_SnapshotBase, _BatchBase): _execute_sql_count = 0 _lock = threading.Lock() _read_only = False + exclude_txn_from_change_streams = False def __init__(self, session): if session._transaction is not None: @@ -86,7 +87,10 @@ def _make_txn_selector(self): if self._transaction_id is None: return TransactionSelector( - begin=TransactionOptions(read_write=TransactionOptions.ReadWrite()) + begin=TransactionOptions( + read_write=TransactionOptions.ReadWrite(), + exclude_txn_from_change_streams=self.exclude_txn_from_change_streams, + ) ) else: return TransactionSelector(id=self._transaction_id) @@ -137,7 +141,10 @@ def begin(self): metadata.append( _metadata_with_leader_aware_routing(database._route_to_leader_enabled) ) - txn_options = TransactionOptions(read_write=TransactionOptions.ReadWrite()) + txn_options = TransactionOptions( + read_write=TransactionOptions.ReadWrite(), + exclude_txn_from_change_streams=self.exclude_txn_from_change_streams, + ) with trace_call("CloudSpanner.BeginTransaction", self._session): method = functools.partial( api.begin_transaction, diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py index 1c02e93f1d..ee96decf5e 100644 --- a/tests/unit/test_batch.py +++ b/tests/unit/test_batch.py @@ -259,7 +259,12 @@ def test_commit_ok(self): "CloudSpanner.Commit", attributes=dict(BASE_ATTRIBUTES, num_mutations=1) ) - def _test_commit_with_options(self, request_options=None, max_commit_delay_in=None): + def _test_commit_with_options( + self, + request_options=None, + max_commit_delay_in=None, + exclude_txn_from_change_streams=False, + ): import datetime from google.cloud.spanner_v1 import CommitResponse from google.cloud.spanner_v1 import TransactionOptions @@ -276,7 +281,9 @@ def _test_commit_with_options(self, request_options=None, max_commit_delay_in=No batch.transaction_tag = self.TRANSACTION_TAG batch.insert(TABLE_NAME, COLUMNS, VALUES) committed = batch.commit( - request_options=request_options, max_commit_delay=max_commit_delay_in + request_options=request_options, + max_commit_delay=max_commit_delay_in, + exclude_txn_from_change_streams=exclude_txn_from_change_streams, ) self.assertEqual(committed, now) @@ -301,6 +308,10 @@ def _test_commit_with_options(self, request_options=None, max_commit_delay_in=No self.assertEqual(mutations, batch._mutations) self.assertIsInstance(single_use_txn, TransactionOptions) self.assertTrue(type(single_use_txn).pb(single_use_txn).HasField("read_write")) + self.assertEqual( + single_use_txn.exclude_txn_from_change_streams, + exclude_txn_from_change_streams, + ) self.assertEqual( metadata, [ @@ -355,6 +366,14 @@ def test_commit_w_max_commit_delay(self): max_commit_delay_in=datetime.timedelta(milliseconds=100), ) + def 
test_commit_w_exclude_txn_from_change_streams(self): + request_options = RequestOptions( + request_tag="tag-1", + ) + self._test_commit_with_options( + request_options=request_options, exclude_txn_from_change_streams=True + ) + def test_context_mgr_already_committed(self): import datetime from google.cloud._helpers import UTC @@ -499,7 +518,9 @@ def test_batch_write_grpc_error(self): attributes=dict(BASE_ATTRIBUTES, num_mutation_groups=1), ) - def _test_batch_write_with_request_options(self, request_options=None): + def _test_batch_write_with_request_options( + self, request_options=None, exclude_txn_from_change_streams=False + ): import datetime from google.cloud.spanner_v1 import BatchWriteResponse from google.cloud._helpers import UTC @@ -519,7 +540,10 @@ def _test_batch_write_with_request_options(self, request_options=None): group = groups.group() group.insert(TABLE_NAME, COLUMNS, VALUES) - response_iter = groups.batch_write(request_options) + response_iter = groups.batch_write( + request_options, + exclude_txn_from_change_streams=exclude_txn_from_change_streams, + ) self.assertEqual(len(response_iter), 1) self.assertEqual(response_iter[0], response) @@ -528,6 +552,7 @@ def _test_batch_write_with_request_options(self, request_options=None): mutation_groups, actual_request_options, metadata, + request_exclude_txn_from_change_streams, ) = api._batch_request self.assertEqual(session, self.SESSION_NAME) self.assertEqual(mutation_groups, groups._mutation_groups) @@ -545,6 +570,9 @@ def _test_batch_write_with_request_options(self, request_options=None): else: expected_request_options = request_options self.assertEqual(actual_request_options, expected_request_options) + self.assertEqual( + request_exclude_txn_from_change_streams, exclude_txn_from_change_streams + ) self.assertSpanAttributes( "CloudSpanner.BatchWrite", @@ -567,6 +595,11 @@ def test_batch_write_w_incorrect_tag_dictionary_error(self): with self.assertRaises(ValueError): self._test_batch_write_with_request_options({"incorrect_tag": "tag-1-1"}) + def test_batch_write_w_exclude_txn_from_change_streams(self): + self._test_batch_write_with_request_options( + exclude_txn_from_change_streams=True + ) + class _Session(object): def __init__(self, database=None, name=TestBatch.SESSION_NAME): @@ -625,6 +658,7 @@ def batch_write( request.mutation_groups, request.request_options, metadata, + request.exclude_txn_from_change_streams, ) if self._rpc_error: raise Unknown("error") diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py index ec2983ff7e..90fa0c269f 100644 --- a/tests/unit/test_database.py +++ b/tests/unit/test_database.py @@ -1083,6 +1083,7 @@ def _execute_partitioned_dml_helper( query_options=None, request_options=None, retried=False, + exclude_txn_from_change_streams=False, ): from google.api_core.exceptions import Aborted from google.api_core.retry import Retry @@ -1129,13 +1130,19 @@ def _execute_partitioned_dml_helper( api.execute_streaming_sql.return_value = iterator row_count = database.execute_partitioned_dml( - dml, params, param_types, query_options, request_options + dml, + params, + param_types, + query_options, + request_options, + exclude_txn_from_change_streams, ) self.assertEqual(row_count, 2) txn_options = TransactionOptions( - partitioned_dml=TransactionOptions.PartitionedDml() + partitioned_dml=TransactionOptions.PartitionedDml(), + exclude_txn_from_change_streams=exclude_txn_from_change_streams, ) api.begin_transaction.assert_called_with( @@ -1250,6 +1257,11 @@ def 
test_execute_partitioned_dml_w_req_tag_used(self): def test_execute_partitioned_dml_wo_params_retry_aborted(self): self._execute_partitioned_dml_helper(dml=DML_WO_PARAM, retried=True) + def test_execute_partitioned_dml_w_exclude_txn_from_change_streams(self): + self._execute_partitioned_dml_helper( + dml=DML_WO_PARAM, exclude_txn_from_change_streams=True + ) + def test_session_factory_defaults(self): from google.cloud.spanner_v1.session import Session diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index 917e875f22..d4052f0ae3 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -1696,6 +1696,191 @@ def unit_of_work(txn, *args, **kw): ], ) + def test_run_in_transaction_w_exclude_txn_from_change_streams(self): + import datetime + from google.cloud.spanner_v1 import CommitRequest + from google.cloud.spanner_v1 import CommitResponse + from google.cloud.spanner_v1 import ( + Transaction as TransactionPB, + TransactionOptions, + ) + from google.cloud._helpers import UTC + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.spanner_v1.transaction import Transaction + + TABLE_NAME = "citizens" + COLUMNS = ["email", "first_name", "last_name", "age"] + VALUES = [ + ["phred@exammple.com", "Phred", "Phlyntstone", 32], + ["bharney@example.com", "Bharney", "Rhubble", 31], + ] + TRANSACTION_ID = b"FACEDACE" + transaction_pb = TransactionPB(id=TRANSACTION_ID) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) + now_pb = _datetime_to_pb_timestamp(now) + commit_stats = CommitResponse.CommitStats(mutation_count=4) + response = CommitResponse(commit_timestamp=now_pb, commit_stats=commit_stats) + gax_api = self._make_spanner_api() + gax_api.begin_transaction.return_value = transaction_pb + gax_api.commit.return_value = response + database = self._make_database() + database.spanner_api = gax_api + session = self._make_one(database) + session._session_id = self.SESSION_ID + + called_with = [] + + def unit_of_work(txn, *args, **kw): + called_with.append((txn, args, kw)) + txn.insert(TABLE_NAME, COLUMNS, VALUES) + return 42 + + return_value = session.run_in_transaction( + unit_of_work, "abc", exclude_txn_from_change_streams=True + ) + + self.assertIsNone(session._transaction) + self.assertEqual(len(called_with), 1) + txn, args, kw = called_with[0] + self.assertIsInstance(txn, Transaction) + self.assertEqual(return_value, 42) + self.assertEqual(args, ("abc",)) + + expected_options = TransactionOptions( + read_write=TransactionOptions.ReadWrite(), + exclude_txn_from_change_streams=True, + ) + gax_api.begin_transaction.assert_called_once_with( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ], + ) + request = CommitRequest( + session=self.SESSION_NAME, + mutations=txn._mutations, + transaction_id=TRANSACTION_ID, + request_options=RequestOptions(), + ) + gax_api.commit.assert_called_once_with( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ], + ) + + def test_run_in_transaction_w_abort_w_retry_metadata_w_exclude_txn_from_change_streams( + self, + ): + import datetime + from google.api_core.exceptions import Aborted + from google.protobuf.duration_pb2 import Duration + from google.rpc.error_details_pb2 import RetryInfo + from google.cloud.spanner_v1 import CommitRequest + from google.cloud.spanner_v1 import CommitResponse + from 
google.cloud.spanner_v1 import ( + Transaction as TransactionPB, + TransactionOptions, + ) + from google.cloud._helpers import UTC + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.spanner_v1.transaction import Transaction + + TABLE_NAME = "citizens" + COLUMNS = ["email", "first_name", "last_name", "age"] + VALUES = [ + ["phred@exammple.com", "Phred", "Phlyntstone", 32], + ["bharney@example.com", "Bharney", "Rhubble", 31], + ] + TRANSACTION_ID = b"FACEDACE" + RETRY_SECONDS = 12 + RETRY_NANOS = 3456 + retry_info = RetryInfo( + retry_delay=Duration(seconds=RETRY_SECONDS, nanos=RETRY_NANOS) + ) + trailing_metadata = [ + ("google.rpc.retryinfo-bin", retry_info.SerializeToString()) + ] + aborted = _make_rpc_error(Aborted, trailing_metadata=trailing_metadata) + transaction_pb = TransactionPB(id=TRANSACTION_ID) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) + now_pb = _datetime_to_pb_timestamp(now) + response = CommitResponse(commit_timestamp=now_pb) + gax_api = self._make_spanner_api() + gax_api.begin_transaction.return_value = transaction_pb + gax_api.commit.side_effect = [aborted, response] + database = self._make_database() + database.spanner_api = gax_api + session = self._make_one(database) + session._session_id = self.SESSION_ID + + called_with = [] + + def unit_of_work(txn, *args, **kw): + called_with.append((txn, args, kw)) + txn.insert(TABLE_NAME, COLUMNS, VALUES) + + with mock.patch("time.sleep") as sleep_mock: + session.run_in_transaction( + unit_of_work, + "abc", + some_arg="def", + exclude_txn_from_change_streams=True, + ) + + sleep_mock.assert_called_once_with(RETRY_SECONDS + RETRY_NANOS / 1.0e9) + self.assertEqual(len(called_with), 2) + + for index, (txn, args, kw) in enumerate(called_with): + self.assertIsInstance(txn, Transaction) + if index == 1: + self.assertEqual(txn.committed, now) + else: + self.assertIsNone(txn.committed) + self.assertEqual(args, ("abc",)) + self.assertEqual(kw, {"some_arg": "def"}) + + expected_options = TransactionOptions( + read_write=TransactionOptions.ReadWrite(), + exclude_txn_from_change_streams=True, + ) + self.assertEqual( + gax_api.begin_transaction.call_args_list, + [ + mock.call( + session=self.SESSION_NAME, + options=expected_options, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ], + ) + ] + * 2, + ) + request = CommitRequest( + session=self.SESSION_NAME, + mutations=txn._mutations, + transaction_id=TRANSACTION_ID, + request_options=RequestOptions(), + ) + self.assertEqual( + gax_api.commit.call_args_list, + [ + mock.call( + request=request, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ], + ) + ] + * 2, + ) + def test_delay_helper_w_no_delay(self): from google.cloud.spanner_v1.session import _delay_until_retry diff --git a/tests/unit/test_spanner.py b/tests/unit/test_spanner.py index 0c7feed5ac..ab5479eb3c 100644 --- a/tests/unit/test_spanner.py +++ b/tests/unit/test_spanner.py @@ -137,6 +137,7 @@ def _execute_update_helper( api, count=0, query_options=None, + exclude_txn_from_change_streams=False, ): stats_pb = ResultSetStats(row_count_exact=1) @@ -145,6 +146,7 @@ def _execute_update_helper( api.execute_sql.return_value = ResultSet(stats=stats_pb, metadata=metadata_pb) transaction.transaction_tag = self.TRANSACTION_TAG + transaction.exclude_txn_from_change_streams = exclude_txn_from_change_streams transaction._execute_sql_count = count row_count = 
transaction.execute_update( @@ -160,11 +162,19 @@ def _execute_update_helper( self.assertEqual(row_count, count + 1) def _execute_update_expected_request( - self, database, query_options=None, begin=True, count=0 + self, + database, + query_options=None, + begin=True, + count=0, + exclude_txn_from_change_streams=False, ): if begin is True: expected_transaction = TransactionSelector( - begin=TransactionOptions(read_write=TransactionOptions.ReadWrite()) + begin=TransactionOptions( + read_write=TransactionOptions.ReadWrite(), + exclude_txn_from_change_streams=exclude_txn_from_change_streams, + ) ) else: expected_transaction = TransactionSelector(id=self.TRANSACTION_ID) @@ -560,6 +570,29 @@ def test_transaction_should_include_begin_with_first_batch_update(self): timeout=TIMEOUT, ) + def test_transaction_should_include_begin_w_exclude_txn_from_change_streams_with_first_update( + self, + ): + database = _Database() + session = _Session(database) + api = database.spanner_api = self._make_spanner_api() + transaction = self._make_one(session) + self._execute_update_helper( + transaction=transaction, api=api, exclude_txn_from_change_streams=True + ) + + api.execute_sql.assert_called_once_with( + request=self._execute_update_expected_request( + database=database, exclude_txn_from_change_streams=True + ), + retry=RETRY, + timeout=TIMEOUT, + metadata=[ + ("google-cloud-resource-prefix", database.name), + ("x-goog-spanner-route-to-leader", "true"), + ], + ) + def test_transaction_should_use_transaction_id_if_error_with_first_batch_update( self, ): From d00e9667a431aa23179e9f05d8fdfc6ac9c212d6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 8 Jul 2024 13:36:57 -0400 Subject: [PATCH 02/10] chore: update templated files (#1156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update templated files * update replacements in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * clean up replacement * update replacement in owlbot.py * update replacement in owlbot.py --------- Co-authored-by: Owl Bot --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 3 +- .github/auto-label.yaml | 2 +- .kokoro/build.sh | 2 +- .kokoro/docker/docs/Dockerfile | 2 +- .kokoro/populate-secrets.sh | 2 +- .kokoro/publish-docs.sh | 2 +- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 509 ++++++++++++++------------- .kokoro/test-samples-against-head.sh | 2 +- .kokoro/test-samples.sh | 2 +- .kokoro/trampoline.sh | 2 +- .kokoro/trampoline_v2.sh | 2 +- .pre-commit-config.yaml | 2 +- .trampolinerc | 2 +- MANIFEST.in | 2 +- docs/conf.py | 2 +- noxfile.py | 99 ++++-- owlbot.py | 69 +++- scripts/decrypt-secrets.sh | 2 +- scripts/readme-gen/readme_gen.py | 2 +- 21 files changed, 406 insertions(+), 308 deletions(-) diff --git a/.flake8 b/.flake8 index 87f6e408c4..32986c7928 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 81f87c5691..6201596218 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 8b37ee8971..21786a4eb0 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/build.sh b/.kokoro/build.sh index bacf3e9687..7ddfe694b0 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index bdaf39fe22..a26ce61930 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index 6f3972140e..c435402f47 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 9eafe0be3b..38f083f05a 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 3c18c6d410..a0c05f4a6e 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 51f92b8e12..35ece0e4d2 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - 
--hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + 
--hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # 
gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 
\ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - 
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + 
--hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + 
--hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + 
--hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + 
--hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - 
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + 
--hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 63ac41dfae..e9d8bd79a6 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 50b35a48c1..7933d82014 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index d85b1f2676..48f7969970 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 59a7cf3a93..35fa529231 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a8e169506..1d74695f70 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.trampolinerc b/.trampolinerc index a7dfeb42c6..0080152373 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/MANIFEST.in b/MANIFEST.in index e0a6670531..d6814cd600 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/conf.py b/docs/conf.py index ea1791e9a7..78e49ed55c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
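A note on the dependency churn earlier in this patch: the pinned requirements files are never edited by hand. As their generated headers state, they are produced by pip-tools, and the docs Dockerfile later in this series installs them with hash checking enabled. A minimal sketch of that round trip, quoting the commands that appear in those files (exact file names assumed):

    # Regenerate the lock file, emitting a sha256 pin for every wheel/sdist.
    pip-compile --allow-unsafe --generate-hashes requirements.in

    # Install then refuses any artifact whose hash is not in the lock file.
    python3 -m pip install --require-hashes -r requirements.txt

This is why each version bump above replaces a whole block of --hash lines rather than a single version string.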
diff --git a/noxfile.py b/noxfile.py index ea452e3e93..7b275246a0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -160,22 +160,22 @@ def install_unittest_dependencies(session, *constraints): else: session.install("-e", ".", *constraints) - -def default(session): - # Install all test dependencies, then install this package in-place. - + # XXX Work around Kokoro image's older pip, which borks the OT install. + session.run("pip", "install", "--upgrade", "pip") constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - install_unittest_dependencies(session, "-c", constraints_path) + session.install("-e", ".[tracing]", "-c", constraints_path) + # XXX: Dump installed versions to debug OT issue + session.run("pip", "list") - # Run py.test against the unit tests. + # Run py.test against the unit tests with OpenTelemetry. session.run( "py.test", "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", + "--cov=google.cloud.spanner", + "--cov=google.cloud", + "--cov=tests.unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", @@ -184,34 +184,48 @@ def default(session): *session.posargs, ) - # XXX Work around Kokoro image's older pip, which borks the OT install. - session.run("pip", "install", "--upgrade", "pip") - session.install("-e", ".[tracing]", "-c", constraints_path) - # XXX: Dump installed versions to debug OT issue - session.run("pip", "list") - # Run py.test against the unit tests with OpenTelemetry. +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google.cloud.spanner", - "--cov=google.cloud", - "--cov=tests.unit", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
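For context on the noxfile hunk above: @nox.parametrize expands the decorated session into one concrete session per (python, protobuf_implementation) combination, and the environment variable handed to py.test is what actually selects the protobuf runtime (pure-Python, upb, or the legacy C++ extension; upb has been the default since protobuf 4.21). A minimal self-contained sketch of the same pattern, not the project's actual noxfile (the install target and version list are illustrative; the nox APIs are real):

    import nox

    @nox.session(python=["3.8", "3.12"])
    @nox.parametrize("protobuf_implementation", ["python", "upb", "cpp"])
    def unit(session, protobuf_implementation):
        # Mirror the skip in the diff: the C++ runtime requires protobuf < 4,
        # which does not support the newest CPython versions.
        if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"):
            session.skip("cpp implementation is not supported in python 3.11+")
        session.install("pytest", ".")
        session.run(
            "pytest",
            "tests/unit",
            env={"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation},
        )

Individual combinations can then be run by name, e.g. something like nox -s "unit-3.12(protobuf_implementation='upb')"; nox -l lists the exact generated session names.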
@@ -399,11 +413,24 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -@nox.parametrize("database_dialect", ["GOOGLE_STANDARD_SQL", "POSTGRESQL"]) -def prerelease_deps(session, database_dialect): +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation,database_dialect", + [ + ("python", "GOOGLE_STANDARD_SQL"), + ("python", "POSTGRESQL"), + ("upb", "GOOGLE_STANDARD_SQL"), + ("upb", "POSTGRESQL"), + ("cpp", "GOOGLE_STANDARD_SQL"), + ("cpp", "POSTGRESQL"), + ], +) +def prerelease_deps(session, protobuf_implementation, database_dialect): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -438,9 +465,9 @@ def prerelease_deps(session, database_dialect): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -466,7 +493,15 @@ def prerelease_deps(session, database_dialect): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + "SPANNER_DATABASE_DIALECT": database_dialect, + "SKIP_BACKUP_TESTS": "true", + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -480,6 +515,7 @@ def prerelease_deps(session, database_dialect): system_test_path, *session.posargs, env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, "SPANNER_DATABASE_DIALECT": database_dialect, "SKIP_BACKUP_TESTS": "true", }, @@ -492,6 +528,7 @@ def prerelease_deps(session, database_dialect): system_test_folder_path, *session.posargs, env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, "SPANNER_DATABASE_DIALECT": database_dialect, "SKIP_BACKUP_TESTS": "true", }, diff --git a/owlbot.py b/owlbot.py index 4ef3686ce8..e9c12e593c 100644 --- a/owlbot.py +++ b/owlbot.py @@ -177,6 +177,9 @@ def place_before(path, text, *before_text, escape=None): open_telemetry_test = """ # XXX Work around Kokoro image's older pip, which borks the OT install. 
session.run("pip", "install", "--upgrade", "pip") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) session.install("-e", ".[tracing]", "-c", constraints_path) # XXX: Dump installed versions to debug OT issue session.run("pip", "list") @@ -239,33 +242,69 @@ def system\(session\):""", def system(session, database_dialect):""", ) -s.replace("noxfile.py", - """system_test_path, - \*session.posargs,""", - """system_test_path, - *session.posargs, +s.replace( + "noxfile.py", + """\*session.posargs, + \)""", + """*session.posargs, env={ "SPANNER_DATABASE_DIALECT": database_dialect, "SKIP_BACKUP_TESTS": "true", - },""" + }, + )""", ) s.replace("noxfile.py", - """system_test_folder_path, - \*session.posargs,""", - """system_test_folder_path, - *session.posargs, - env={ + """env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + },""", + """env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, "SPANNER_DATABASE_DIALECT": database_dialect, "SKIP_BACKUP_TESTS": "true", - },""" + },""", +) + +s.replace("noxfile.py", +"""session.run\( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + \)""", +"""session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + "SPANNER_DATABASE_DIALECT": database_dialect, + "SKIP_BACKUP_TESTS": "true", + }, + )""", ) s.replace( "noxfile.py", - """def prerelease_deps\(session\):""", - """@nox.parametrize("database_dialect", ["GOOGLE_STANDARD_SQL", "POSTGRESQL"]) -def prerelease_deps(session, database_dialect):""" + """\@nox.session\(python="3.12"\) +\@nox.parametrize\( + "protobuf_implementation", + \[ "python", "upb", "cpp" \], +\) +def prerelease_deps\(session, protobuf_implementation\):""", + """@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation,database_dialect", + [ + ("python", "GOOGLE_STANDARD_SQL"), + ("python", "POSTGRESQL"), + ("upb", "GOOGLE_STANDARD_SQL"), + ("upb", "POSTGRESQL"), + ("cpp", "GOOGLE_STANDARD_SQL"), + ("cpp", "POSTGRESQL"), + ], +) +def prerelease_deps(session, protobuf_implementation, database_dialect):""", ) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 0018b421dd..120b0ddc43 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 1acc119835..8f5e248a0d 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 112e74e6f15dc883dc347e82c890251c53a51a02 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Jul 2024 13:05:06 -0400 Subject: [PATCH 03/10] chore(python): use python 3.10 for docs build (#1160) Source-Link: https://github.com/googleapis/synthtool/commit/9ae07858520bf035a3d5be569b5a65d960ee4392 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 3 +- .kokoro/docker/docs/Dockerfile | 21 +++++++------ .kokoro/docker/docs/requirements.txt | 40 +++++++++++++----------- .kokoro/requirements.txt | 46 ++++++++++++++-------------- noxfile.py | 2 +- 5 files changed, 60 insertions(+), 52 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 6201596218..f30cb3775a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e +# created: 2024-07-08T19:25:35.862283192Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index a26ce61930..5205308b33 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index 0e5d70f20f..7129c77155 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + 
--hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 35ece0e4d2..9622baf0ba 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.6.2 \ - --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ - --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -371,23 +371,23 @@ more-itertools==10.3.0 \ # via # jaraco-classes # jaraco-functools -nh3==0.2.17 \ - 
--hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ - --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ - --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ - --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ - --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ - --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ - --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ - --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ - --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ - --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ - --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ - --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ - --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ - --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ - --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ - --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer nox==2024.4.15 \ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ @@ -460,9 +460,9 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==43.0 \ - --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ - --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ diff --git a/noxfile.py b/noxfile.py index 
7b275246a0..3b656a758c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -332,7 +332,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" From ac8a004faf95e4b0d88895d4578061a2f648a16b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 11 Jul 2024 09:35:27 +0200 Subject: [PATCH 04/10] chore(deps): update all dependencies (#1161) --- .devcontainer/requirements.txt | 36 +++++++++++++-------------- samples/samples/requirements-test.txt | 2 +- samples/samples/requirements.txt | 2 +- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/.devcontainer/requirements.txt b/.devcontainer/requirements.txt index 4abbd91012..8f8ce39776 100644 --- a/.devcontainer/requirements.txt +++ b/.devcontainer/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,23 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.4 \ - --hash=sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f \ - --hash=sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4 +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git 
a/samples/samples/requirements-test.txt b/samples/samples/requirements-test.txt index 17a4519faf..ba323d2852 100644 --- a/samples/samples/requirements-test.txt +++ b/samples/samples/requirements-test.txt @@ -1,4 +1,4 @@ -pytest==8.1.1 +pytest==8.2.2 pytest-dependency==0.6.0 mock==5.1.0 google-cloud-testutils==1.4.0 diff --git a/samples/samples/requirements.txt b/samples/samples/requirements.txt index 26f59dcbe7..3058d80948 100644 --- a/samples/samples/requirements.txt +++ b/samples/samples/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-spanner==3.44.0 +google-cloud-spanner==3.47.0 futures==3.4.0; python_version < "3" From 9609ad96d062fbd8fa4d622bfe8da119329facc0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Jul 2024 11:05:07 -0400 Subject: [PATCH 05/10] fix: Allow protobuf 5.x (#1144) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add field order_by in spanner.proto feat: add field lock_hint in spanner.proto PiperOrigin-RevId: 636759139 Source-Link: https://github.com/googleapis/googleapis/commit/eeed69d446a90eb4a4a2d1762c49d637075390c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8b4c5dae2157cd683a9229d40de8c71665c21a0a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGI0YzVkYWUyMTU3Y2Q2ODNhOTIyOWQ0MGRlOGM3MTY2NWMyMWEwYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Allow protobuf 5.x --------- Co-authored-by: Owl Bot Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../spanner_admin_database_v1/__init__.py | 20 + .../gapic_metadata.json | 75 + .../services/database_admin/async_client.py | 598 ++ .../services/database_admin/client.py | 602 ++ .../services/database_admin/pagers.py | 129 + .../database_admin/transports/base.py | 139 +- .../database_admin/transports/grpc.py | 148 +- .../database_admin/transports/grpc_asyncio.py | 226 +- .../database_admin/transports/rest.py | 723 +- .../types/__init__.py | 22 + .../spanner_admin_database_v1/types/backup.py | 27 + .../types/backup_schedule.py | 354 + .../services/instance_admin/async_client.py | 1 + .../instance_admin/transports/base.py | 4 +- .../instance_admin/transports/grpc.py | 3 +- .../instance_admin/transports/grpc_asyncio.py | 3 +- .../services/spanner/async_client.py | 1 + 
.../services/spanner/transports/base.py | 4 +- .../services/spanner/transports/grpc.py | 3 +- .../spanner/transports/grpc_asyncio.py | 3 +- google/cloud/spanner_v1/types/spanner.py | 102 + ...data_google.spanner.admin.database.v1.json | 1065 ++- ...data_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- ...base_admin_create_backup_schedule_async.py | 53 + ...abase_admin_create_backup_schedule_sync.py | 53 + ...base_admin_delete_backup_schedule_async.py | 50 + ...abase_admin_delete_backup_schedule_sync.py | 50 + ...atabase_admin_get_backup_schedule_async.py | 52 + ...database_admin_get_backup_schedule_sync.py | 52 + ...abase_admin_list_backup_schedules_async.py | 53 + ...tabase_admin_list_backup_schedules_sync.py | 53 + ...base_admin_update_backup_schedule_async.py | 51 + ...abase_admin_update_backup_schedule_sync.py | 51 + ...ixup_spanner_admin_database_v1_keywords.py | 5 + scripts/fixup_spanner_v1_keywords.py | 4 +- setup.py | 2 +- .../test_database_admin.py | 8337 ++++++++++++----- .../test_instance_admin.py | 170 +- tests/unit/gapic/spanner_v1/test_spanner.py | 118 +- 40 files changed, 10826 insertions(+), 2584 deletions(-) create mode 100644 google/cloud/spanner_admin_database_v1/types/backup_schedule.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py diff --git a/google/cloud/spanner_admin_database_v1/__init__.py b/google/cloud/spanner_admin_database_v1/__init__.py index a14af051d6..74715d1e44 100644 --- a/google/cloud/spanner_admin_database_v1/__init__.py +++ b/google/cloud/spanner_admin_database_v1/__init__.py @@ -30,12 +30,22 @@ from .types.backup import CreateBackupMetadata from .types.backup import CreateBackupRequest from .types.backup import DeleteBackupRequest +from .types.backup import FullBackupSpec from .types.backup import GetBackupRequest from .types.backup import ListBackupOperationsRequest from .types.backup import ListBackupOperationsResponse from .types.backup import ListBackupsRequest from .types.backup import ListBackupsResponse from .types.backup import UpdateBackupRequest +from .types.backup_schedule import BackupSchedule +from .types.backup_schedule import BackupScheduleSpec +from .types.backup_schedule import CreateBackupScheduleRequest +from .types.backup_schedule import CrontabSpec +from .types.backup_schedule import DeleteBackupScheduleRequest +from .types.backup_schedule import GetBackupScheduleRequest +from .types.backup_schedule import ListBackupSchedulesRequest 
+from .types.backup_schedule import ListBackupSchedulesResponse +from .types.backup_schedule import UpdateBackupScheduleRequest from .types.common import EncryptionConfig from .types.common import EncryptionInfo from .types.common import OperationProgress @@ -70,29 +80,38 @@ "DatabaseAdminAsyncClient", "Backup", "BackupInfo", + "BackupSchedule", + "BackupScheduleSpec", "CopyBackupEncryptionConfig", "CopyBackupMetadata", "CopyBackupRequest", "CreateBackupEncryptionConfig", "CreateBackupMetadata", "CreateBackupRequest", + "CreateBackupScheduleRequest", "CreateDatabaseMetadata", "CreateDatabaseRequest", + "CrontabSpec", "Database", "DatabaseAdminClient", "DatabaseDialect", "DatabaseRole", "DdlStatementActionInfo", "DeleteBackupRequest", + "DeleteBackupScheduleRequest", "DropDatabaseRequest", "EncryptionConfig", "EncryptionInfo", + "FullBackupSpec", "GetBackupRequest", + "GetBackupScheduleRequest", "GetDatabaseDdlRequest", "GetDatabaseDdlResponse", "GetDatabaseRequest", "ListBackupOperationsRequest", "ListBackupOperationsResponse", + "ListBackupSchedulesRequest", + "ListBackupSchedulesResponse", "ListBackupsRequest", "ListBackupsResponse", "ListDatabaseOperationsRequest", @@ -109,6 +128,7 @@ "RestoreInfo", "RestoreSourceType", "UpdateBackupRequest", + "UpdateBackupScheduleRequest", "UpdateDatabaseDdlMetadata", "UpdateDatabaseDdlRequest", "UpdateDatabaseMetadata", diff --git a/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/google/cloud/spanner_admin_database_v1/gapic_metadata.json index b0fb4f1384..e6096e59a2 100644 --- a/google/cloud/spanner_admin_database_v1/gapic_metadata.json +++ b/google/cloud/spanner_admin_database_v1/gapic_metadata.json @@ -20,6 +20,11 @@ "create_backup" ] }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, "CreateDatabase": { "methods": [ "create_database" @@ -30,6 +35,11 @@ "delete_backup" ] }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, "DropDatabase": { "methods": [ "drop_database" @@ -40,6 +50,11 @@ "get_backup" ] }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, "GetDatabase": { "methods": [ "get_database" @@ -60,6 +75,11 @@ "list_backup_operations" ] }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, "ListBackups": { "methods": [ "list_backups" @@ -100,6 +120,11 @@ "update_backup" ] }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, "UpdateDatabase": { "methods": [ "update_database" @@ -125,6 +150,11 @@ "create_backup" ] }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, "CreateDatabase": { "methods": [ "create_database" @@ -135,6 +165,11 @@ "delete_backup" ] }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, "DropDatabase": { "methods": [ "drop_database" @@ -145,6 +180,11 @@ "get_backup" ] }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, "GetDatabase": { "methods": [ "get_database" @@ -165,6 +205,11 @@ "list_backup_operations" ] }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, "ListBackups": { "methods": [ "list_backups" @@ -205,6 +250,11 @@ "update_backup" ] }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, "UpdateDatabase": { "methods": [ "update_database" @@ -230,6 +280,11 @@ "create_backup" ] }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, "CreateDatabase": { "methods": [ "create_database" @@ -240,6 +295,11 @@ 
"delete_backup" ] }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, "DropDatabase": { "methods": [ "drop_database" @@ -250,6 +310,11 @@ "get_backup" ] }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, "GetDatabase": { "methods": [ "get_database" @@ -270,6 +335,11 @@ "list_backup_operations" ] }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, "ListBackups": { "methods": [ "list_backups" @@ -310,6 +380,11 @@ "update_backup" ] }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, "UpdateDatabase": { "methods": [ "update_database" diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index e2b2143c82..89c9f4e972 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -38,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -48,12 +49,17 @@ from google.cloud.spanner_admin_database_v1.services.database_admin import pagers from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as gsad_backup_schedule, +) from google.cloud.spanner_admin_database_v1.types import common from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -84,6 +90,10 @@ class DatabaseAdminAsyncClient: backup_path = staticmethod(DatabaseAdminClient.backup_path) parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) + backup_schedule_path = staticmethod(DatabaseAdminClient.backup_schedule_path) + parse_backup_schedule_path = staticmethod( + DatabaseAdminClient.parse_backup_schedule_path + ) crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) @@ -2964,6 +2974,594 @@ async def sample_list_database_roles(): # Done; return the response. 
return response + async def create_backup_schedule( + self, + request: Optional[ + Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + backup_schedule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Creates a new backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]]): + The request object. The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + parent (:class:`str`): + Required. The name of the database + that this backup schedule applies to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): + Required. The backup schedule to + create. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule_id (:class:`str`): + Required. The Id to use for the backup schedule. The + ``backup_schedule_id`` appended to ``parent`` forms the + full backup schedule name of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``backup_schedule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_schedule, backup_schedule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest): + request = gsad_backup_schedule.CreateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if backup_schedule_id is not None: + request.backup_schedule_id = backup_schedule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_schedule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_schedule( + self, + request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_schedule.BackupSchedule: + r"""Gets backup schedule for the input schedule name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]]): + The request object. The request for + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. + name (:class:`str`): + Required. The name of the schedule to retrieve. Values + are of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.GetBackupScheduleRequest): + request = backup_schedule.GetBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_schedule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_schedule( + self, + request: Optional[ + Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict] + ] = None, + *, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]]): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated + as specified by ``update_mask`` are required. Other + fields are ignored. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. 
A mask specifying which + fields in the BackupSchedule resource + should be updated. This mask is relative + to the BackupSchedule resource, not to + the request message. The field mask must + always be specified; this prevents any + future fields from being erased + accidentally. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_schedule, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): + request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup_schedule + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_schedule.name", request.backup_schedule.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_schedule( + self, + request: Optional[ + Union[backup_schedule.DeleteBackupScheduleRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            async def sample_delete_backup_schedule():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_backup_schedule(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]]):
+                The request object. The request for
+                [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule].
+            name (:class:`str`):
+                Required. The name of the schedule to delete. Values are
+                of the form
+                ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest):
+            request = backup_schedule.DeleteBackupScheduleRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.delete_backup_schedule
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def list_backup_schedules(
+        self,
+        request: Optional[
+            Union[backup_schedule.ListBackupSchedulesRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListBackupSchedulesAsyncPager:
+        r"""Lists all the backup schedules for the database.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            async def sample_list_backup_schedules():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.ListBackupSchedulesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_backup_schedules(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]]):
+                The request object. The request for
+                [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+            parent (:class:`str`):
+                Required. Database is the parent
+                resource whose backup schedules should
+                be listed. Values are of the form
+                projects/<project>/instances/<instance>/databases/<database>
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager:
+                The response for
+                [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, backup_schedule.ListBackupSchedulesRequest):
+            request = backup_schedule.ListBackupSchedulesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.list_backup_schedules
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListBackupSchedulesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
     async def list_operations(
         self,
         request: Optional[operations_pb2.ListOperationsRequest] = None,
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py
index 2be2266f45..00fe12755a 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py
@@ -53,12 +53,17 @@
 from google.cloud.spanner_admin_database_v1.services.database_admin import pagers
 from google.cloud.spanner_admin_database_v1.types import backup
 from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
+from google.cloud.spanner_admin_database_v1.types import backup_schedule
+from google.cloud.spanner_admin_database_v1.types import (
+    backup_schedule as gsad_backup_schedule,
+)
 from google.cloud.spanner_admin_database_v1.types import common
 from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
 from google.iam.v1 import iam_policy_pb2  # type: ignore
 from google.iam.v1 import policy_pb2  # type: ignore
 from google.longrunning import operations_pb2  # type: ignore
 from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from google.protobuf import field_mask_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
@@ -222,6 +227,30 @@ def parse_backup_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def backup_schedule_path(
+        project: str,
+        instance: str,
+        database: str,
+        schedule: str,
+    ) -> str:
+        """Returns a fully-qualified backup_schedule string."""
+        return "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(
+            project=project,
+            instance=instance,
+            database=database,
+            schedule=schedule,
+        )
+
+    @staticmethod
+    def parse_backup_schedule_path(path: str) -> Dict[str, str]:
+        """Parses a backup_schedule path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/backupSchedules/(?P<schedule>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def crypto_key_path(
         project: str,
@@ -3433,6 +3462,579 @@ def sample_list_database_roles():
         # Done; return the response.
         return response
 
+    def create_backup_schedule(
+        self,
+        request: Optional[
+            Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None,
+        backup_schedule_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> gsad_backup_schedule.BackupSchedule:
+        r"""Creates a new backup schedule.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_create_backup_schedule():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.CreateBackupScheduleRequest(
+                    parent="parent_value",
+                    backup_schedule_id="backup_schedule_id_value",
+                )
+
+                # Make the request
+                response = client.create_backup_schedule(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]):
+                The request object. The request for
+                [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule].
+            parent (str):
+                Required. The name of the database
+                that this backup schedule applies to.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule):
+                Required. The backup schedule to
+                create.
+
+                This corresponds to the ``backup_schedule`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup_schedule_id (str):
+                Required. The Id to use for the backup schedule. The
+                ``backup_schedule_id`` appended to ``parent`` forms the
+                full backup schedule name of the form
+                ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+                This corresponds to the ``backup_schedule_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+                BackupSchedule expresses the
+                automated backup creation specification
+                for a Spanner database. Next ID: 10
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, backup_schedule, backup_schedule_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest):
+            request = gsad_backup_schedule.CreateBackupScheduleRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if backup_schedule is not None:
+                request.backup_schedule = backup_schedule
+            if backup_schedule_id is not None:
+                request.backup_schedule_id = backup_schedule_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_backup_schedule(
+        self,
+        request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> backup_schedule.BackupSchedule:
+        r"""Gets backup schedule for the input schedule name.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_get_backup_schedule():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.GetBackupScheduleRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_backup_schedule(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]):
+                The request object. The request for
+                [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule].
+            name (str):
+                Required. The name of the schedule to retrieve. Values
+                are of the form
+                ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+                BackupSchedule expresses the
+                automated backup creation specification
+                for a Spanner database. Next ID: 10
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, backup_schedule.GetBackupScheduleRequest):
+            request = backup_schedule.GetBackupScheduleRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup_schedule( + self, + request: Optional[ + Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict] + ] = None, + *, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated + as specified by ``update_mask`` are required. Other + fields are ignored. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which + fields in the BackupSchedule resource + should be updated. This mask is relative + to the BackupSchedule resource, not to + the request message. The field mask must + always be specified; this prevents any + future fields from being erased + accidentally. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. 
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([backup_schedule, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest):
+            request = gsad_backup_schedule.UpdateBackupScheduleRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if backup_schedule is not None:
+                request.backup_schedule = backup_schedule
+            if update_mask is not None:
+                request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("backup_schedule.name", request.backup_schedule.name),)
+            ),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_backup_schedule(
+        self,
+        request: Optional[
+            Union[backup_schedule.DeleteBackupScheduleRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Deletes a backup schedule.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_delete_backup_schedule():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_backup_schedule(request=request)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]):
+                The request object. The request for
+                [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule].
+            name (str):
+                Required. The name of the schedule to delete. Values are
+                of the form
+                ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest):
+            request = backup_schedule.DeleteBackupScheduleRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def list_backup_schedules(
+        self,
+        request: Optional[
+            Union[backup_schedule.ListBackupSchedulesRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListBackupSchedulesPager:
+        r"""Lists all the backup schedules for the database.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_list_backup_schedules():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.ListBackupSchedulesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_backup_schedules(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]):
+                The request object. The request for
+                [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+            parent (str):
+                Required. Database is the parent
+                resource whose backup schedules should
+                be listed. Values are of the form
+                projects/<project>/instances/<instance>/databases/<database>
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager: + The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.ListBackupSchedulesRequest): + request = backup_schedule.ListBackupSchedulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupSchedulesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "DatabaseAdminClient": return self diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py index 3efd19e231..e5c9f15526 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -25,6 +25,7 @@ ) from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.longrunning import operations_pb2 # type: ignore @@ -677,3 +678,131 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupSchedulesPager: + """A pager for iterating through ``list_backup_schedules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_schedules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupSchedules`` requests and continue to iterate + through the ``backup_schedules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backup_schedule.ListBackupSchedulesResponse], + request: backup_schedule.ListBackupSchedulesRequest, + response: backup_schedule.ListBackupSchedulesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup_schedule.ListBackupSchedulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup_schedule.ListBackupSchedulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup_schedule.BackupSchedule]: + for page in self.pages: + yield from page.backup_schedules + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupSchedulesAsyncPager: + """A pager for iterating through ``list_backup_schedules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_schedules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupSchedules`` requests and continue to iterate + through the ``backup_schedules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backup_schedule.ListBackupSchedulesResponse]], + request: backup_schedule.ListBackupSchedulesRequest, + response: backup_schedule.ListBackupSchedulesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backup_schedule.ListBackupSchedulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup_schedule.ListBackupSchedulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[backup_schedule.BackupSchedule]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_schedules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py index 65c68857cf..a520507904 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -29,6 +29,10 @@ from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as gsad_backup_schedule, +) from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -92,6 +96,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -104,7 +110,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) @@ -377,6 +383,81 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.create_backup_schedule: gapic_v1.method.wrap_method( + self.create_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup_schedule: gapic_v1.method.wrap_method( + self.get_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup_schedule: gapic_v1.method.wrap_method( + self.update_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup_schedule: gapic_v1.method.wrap_method( + self.delete_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_schedules: gapic_v1.method.wrap_method( + self.list_backup_schedules, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), } def close(self): @@ -591,6 +672,62 @@ def list_database_roles( ]: raise NotImplementedError() + @property + def create_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + Union[ + gsad_backup_schedule.BackupSchedule, + Awaitable[gsad_backup_schedule.BackupSchedule], + ], + ]: + raise NotImplementedError() + + @property + def get_backup_schedule( + self, + ) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + Union[ + backup_schedule.BackupSchedule, Awaitable[backup_schedule.BackupSchedule] + ], + ]: + raise NotImplementedError() + + @property + def update_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + Union[ + gsad_backup_schedule.BackupSchedule, + Awaitable[gsad_backup_schedule.BackupSchedule], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_schedule( + self, + ) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_backup_schedules( + self, + ) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + Union[ + backup_schedule.ListBackupSchedulesResponse, + 
Awaitable[backup_schedule.ListBackupSchedulesResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py index 7b19fdd1c3..344b0c8d25 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -27,6 +27,10 @@ from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as gsad_backup_schedule, +) from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -137,7 +141,8 @@ def __init__( if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -959,6 +964,147 @@ def list_database_roles( ) return self._stubs["list_database_roles"] + @property + def create_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule, + ]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a new backup schedule. + + Returns: + Callable[[~.CreateBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_schedule" not in self._stubs: + self._stubs["create_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule", + request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs["create_backup_schedule"] + + @property + def get_backup_schedule( + self, + ) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], backup_schedule.BackupSchedule + ]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets backup schedule for the input schedule name. + + Returns: + Callable[[~.GetBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_schedule" not in self._stubs: + self._stubs["get_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule", + request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, + response_deserializer=backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs["get_backup_schedule"] + + @property + def update_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule, + ]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_schedule" not in self._stubs: + self._stubs["update_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule", + request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs["update_backup_schedule"] + + @property + def delete_backup_schedule( + self, + ) -> Callable[[backup_schedule.DeleteBackupScheduleRequest], empty_pb2.Empty]: + r"""Return a callable for the delete backup schedule method over gRPC. + + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_schedule" not in self._stubs: + self._stubs["delete_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule", + request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_backup_schedule"] + + @property + def list_backup_schedules( + self, + ) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + backup_schedule.ListBackupSchedulesResponse, + ]: + r"""Return a callable for the list backup schedules method over gRPC. + + Lists all the backup schedules for the database. + + Returns: + Callable[[~.ListBackupSchedulesRequest], + ~.ListBackupSchedulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_schedules" not in self._stubs: + self._stubs["list_backup_schedules"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules", + request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, + response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs["list_backup_schedules"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py index c623769b3d..2f720afc39 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -29,6 +29,10 @@ from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as gsad_backup_schedule, +) from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -184,7 +188,8 @@ def __init__( if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -979,6 +984,150 @@ def list_database_roles( ) return self._stubs["list_database_roles"] + @property + def create_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + Awaitable[gsad_backup_schedule.BackupSchedule], + ]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a new backup schedule. + + Returns: + Callable[[~.CreateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_schedule" not in self._stubs: + self._stubs["create_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule", + request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs["create_backup_schedule"] + + @property + def get_backup_schedule( + self, + ) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + Awaitable[backup_schedule.BackupSchedule], + ]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets backup schedule for the input schedule name. + + Returns: + Callable[[~.GetBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_schedule" not in self._stubs: + self._stubs["get_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule", + request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, + response_deserializer=backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs["get_backup_schedule"] + + @property + def update_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + Awaitable[gsad_backup_schedule.BackupSchedule], + ]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_schedule" not in self._stubs: + self._stubs["update_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule", + request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs["update_backup_schedule"] + + @property + def delete_backup_schedule( + self, + ) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete backup schedule method over gRPC. + + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_schedule" not in self._stubs: + self._stubs["delete_backup_schedule"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule", + request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_backup_schedule"] + + @property + def list_backup_schedules( + self, + ) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + Awaitable[backup_schedule.ListBackupSchedulesResponse], + ]: + r"""Return a callable for the list backup schedules method over gRPC. + + Lists all the backup schedules for the database. + + Returns: + Callable[[~.ListBackupSchedulesRequest], + Awaitable[~.ListBackupSchedulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_schedules" not in self._stubs: + self._stubs["list_backup_schedules"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules", + request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, + response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs["list_backup_schedules"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1222,6 +1371,81 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.create_backup_schedule: gapic_v1.method_async.wrap_method( + self.create_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup_schedule: gapic_v1.method_async.wrap_method( + self.get_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup_schedule: gapic_v1.method_async.wrap_method( + self.update_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup_schedule: gapic_v1.method_async.wrap_method( + self.delete_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_schedules: gapic_v1.method_async.wrap_method( + self.list_backup_schedules, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), } def close(self): diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py index e382274be9..285e28cdc1 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -42,6 +42,10 @@ from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as gsad_backup_schedule, +) from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -92,6 +96,14 @@ 
def post_create_backup(self, response): logging.log(f"Received response: {response}") return response + def pre_create_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_database(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +116,10 @@ def pre_delete_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_drop_database(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -116,6 +132,14 @@ def post_get_backup(self, response): logging.log(f"Received response: {response}") return response + def pre_get_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_database(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -156,6 +180,14 @@ def post_list_backups(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_schedules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_schedules(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_database_operations(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -212,6 +244,14 @@ def post_update_backup(self, response): logging.log(f"Received response: {response}") return response + def pre_update_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_database(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -278,6 +318,31 @@ def post_create_backup( """ return response + def pre_create_backup_schedule( + self, + request: gsad_backup_schedule.CreateBackupScheduleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gsad_backup_schedule.CreateBackupScheduleRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_create_backup_schedule( + self, response: gsad_backup_schedule.BackupSchedule + ) -> gsad_backup_schedule.BackupSchedule: + """Post-rpc interceptor for create_backup_schedule + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_create_database( self, request: spanner_database_admin.CreateDatabaseRequest, @@ -311,6 +376,18 @@ def pre_delete_backup( """ return request, metadata + def pre_delete_backup_schedule( + self, + request: backup_schedule.DeleteBackupScheduleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_schedule.DeleteBackupScheduleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + def pre_drop_database( self, request: spanner_database_admin.DropDatabaseRequest, @@ -342,6 +419,29 @@ def post_get_backup(self, response: backup.Backup) -> backup.Backup: """ return response + def pre_get_backup_schedule( + self, + request: backup_schedule.GetBackupScheduleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_schedule.GetBackupScheduleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_backup_schedule( + self, response: backup_schedule.BackupSchedule + ) -> backup_schedule.BackupSchedule: + """Post-rpc interceptor for get_backup_schedule + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + def pre_get_database( self, request: spanner_database_admin.GetDatabaseRequest, @@ -453,6 +553,29 @@ def post_list_backups( """ return response + def pre_list_backup_schedules( + self, + request: backup_schedule.ListBackupSchedulesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backup_schedule.ListBackupSchedulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_schedules + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backup_schedules( + self, response: backup_schedule.ListBackupSchedulesResponse + ) -> backup_schedule.ListBackupSchedulesResponse: + """Post-rpc interceptor for list_backup_schedules + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + def pre_list_database_operations( self, request: spanner_database_admin.ListDatabaseOperationsRequest, @@ -616,6 +739,31 @@ def post_update_backup(self, response: gsad_backup.Backup) -> gsad_backup.Backup """ return response + def pre_update_backup_schedule( + self, + request: gsad_backup_schedule.UpdateBackupScheduleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gsad_backup_schedule.UpdateBackupScheduleRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_backup_schedule( + self, response: gsad_backup_schedule.BackupSchedule + ) -> gsad_backup_schedule.BackupSchedule: + """Post-rpc interceptor for update_backup_schedule + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. 
+ """ + return response + def pre_update_database( self, request: spanner_database_admin.UpdateDatabaseRequest, @@ -1147,6 +1295,106 @@ def __call__( resp = self._interceptor.post_create_backup(resp) return resp + class _CreateBackupSchedule(DatabaseAdminRestStub): + def __hash__(self): + return hash("CreateBackupSchedule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupScheduleId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsad_backup_schedule.CreateBackupScheduleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Call the create backup schedule method over HTTP. + + Args: + request (~.gsad_backup_schedule.CreateBackupScheduleRequest): + The request object. The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsad_backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules", + "body": "backup_schedule", + }, + ] + request, metadata = self._interceptor.pre_create_backup_schedule( + request, metadata + ) + pb_request = gsad_backup_schedule.CreateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsad_backup_schedule.BackupSchedule() + pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_schedule(resp) + return resp + class _CreateDatabase(DatabaseAdminRestStub): def __hash__(self): return hash("CreateDatabase") @@ -1315,9 +1563,159 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DropDatabase(DatabaseAdminRestStub): + class _DeleteBackupSchedule(DatabaseAdminRestStub): + def __hash__(self): + return hash("DeleteBackupSchedule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backup_schedule.DeleteBackupScheduleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete backup schedule method over HTTP. + + Args: + request (~.backup_schedule.DeleteBackupScheduleRequest): + The request object. The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_schedule( + request, metadata + ) + pb_request = backup_schedule.DeleteBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
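Every REST `__call__` above follows the same shape: select the matching HTTP rule, transcode the request into URI, body, and query params, then send. A sketch of what `path_template.transcode` yields for the CreateBackupSchedule rule, under the assumption that its keyword-argument mode (plain dict fields instead of a protobuf message) behaves as in older generated clients; all values are made up:

# Sketch: mapping a request onto the HTTP rule declared above.
from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules",
        "body": "backup_schedule",
    },
]
transcoded = path_template.transcode(
    http_options,
    parent="projects/p/instances/i/databases/d",
    backup_schedule={"spec": {}},
    backup_schedule_id="daily",
)
# transcoded["method"]       -> "post"
# transcoded["uri"]          -> "/v1/projects/p/instances/i/databases/d/backupSchedules"
# transcoded["body"]         -> {"spec": {}}   (the field named by "body" in the rule)
# transcoded["query_params"] -> {"backup_schedule_id": "daily"}  (leftover fields)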
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DropDatabase(DatabaseAdminRestStub): + def __hash__(self): + return hash("DropDatabase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: spanner_database_admin.DropDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the drop database method over HTTP. + + Args: + request (~.spanner_database_admin.DropDatabaseRequest): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{database=projects/*/instances/*/databases/*}", + }, + ] + request, metadata = self._interceptor.pre_drop_database(request, metadata) + pb_request = spanner_database_admin.DropDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBackup(DatabaseAdminRestStub): def __hash__(self): - return hash("DropDatabase") + return hash("GetBackup") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1331,33 +1729,37 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: spanner_database_admin.DropDatabaseRequest, + request: backup.GetBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ): - r"""Call the drop database method over HTTP. + ) -> backup.Backup: + r"""Call the get backup method over HTTP. Args: - request (~.spanner_database_admin.DropDatabaseRequest): + request (~.backup.GetBackupRequest): The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
+ + Returns: + ~.backup.Backup: + A backup of a Cloud Spanner database. """ http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{database=projects/*/instances/*/databases/*}", + "method": "get", + "uri": "/v1/{name=projects/*/instances/*/backups/*}", }, ] - request, metadata = self._interceptor.pre_drop_database(request, metadata) - pb_request = spanner_database_admin.DropDatabaseRequest.pb(request) + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = backup.GetBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1389,9 +1791,17 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GetBackup(DatabaseAdminRestStub): + # Return the response + resp = backup.Backup() + pb_resp = backup.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetBackupSchedule(DatabaseAdminRestStub): def __hash__(self): - return hash("GetBackup") + return hash("GetBackupSchedule") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1405,18 +1815,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backup.GetBackupRequest, + request: backup_schedule.GetBackupScheduleRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backup.Backup: - r"""Call the get backup method over HTTP. + ) -> backup_schedule.BackupSchedule: + r"""Call the get backup schedule method over HTTP. Args: - request (~.backup.GetBackupRequest): + request (~.backup_schedule.GetBackupScheduleRequest): The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1424,18 +1834,23 @@ def __call__( sent along with the request as metadata. Returns: - ~.backup.Backup: - A backup of a Cloud Spanner database. + ~.backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. 
Next ID: 10 + """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=projects/*/instances/*/backups/*}", + "uri": "/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}", }, ] - request, metadata = self._interceptor.pre_get_backup(request, metadata) - pb_request = backup.GetBackupRequest.pb(request) + request, metadata = self._interceptor.pre_get_backup_schedule( + request, metadata + ) + pb_request = backup_schedule.GetBackupScheduleRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1468,11 +1883,11 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backup.Backup() - pb_resp = backup.Backup.pb(resp) + resp = backup_schedule.BackupSchedule() + pb_resp = backup_schedule.BackupSchedule.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_backup(resp) + resp = self._interceptor.post_get_backup_schedule(resp) return resp class _GetDatabase(DatabaseAdminRestStub): @@ -1775,6 +2190,11 @@ def __call__( "uri": "/v1/{resource=projects/*/instances/*/backups/*}:getIamPolicy", "body": "*", }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:getIamPolicy", + "body": "*", + }, ] request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) pb_request = request @@ -2001,6 +2421,96 @@ def __call__( resp = self._interceptor.post_list_backups(resp) return resp + class _ListBackupSchedules(DatabaseAdminRestStub): + def __hash__(self): + return hash("ListBackupSchedules") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backup_schedule.ListBackupSchedulesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup_schedule.ListBackupSchedulesResponse: + r"""Call the list backup schedules method over HTTP. + + Args: + request (~.backup_schedule.ListBackupSchedulesRequest): + The request object. The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backup_schedule.ListBackupSchedulesResponse: + The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules", + }, + ] + request, metadata = self._interceptor.pre_list_backup_schedules( + request, metadata + ) + pb_request = backup_schedule.ListBackupSchedulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup_schedule.ListBackupSchedulesResponse() + pb_resp = backup_schedule.ListBackupSchedulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_schedules(resp) + return resp + class _ListDatabaseOperations(DatabaseAdminRestStub): def __hash__(self): return hash("ListDatabaseOperations") @@ -2491,6 +3001,11 @@ def __call__( "uri": "/v1/{resource=projects/*/instances/*/backups/*}:setIamPolicy", "body": "*", }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:setIamPolicy", + "body": "*", + }, ] request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) pb_request = request @@ -2588,6 +3103,11 @@ def __call__( "uri": "/v1/{resource=projects/*/instances/*/backups/*}:testIamPermissions", "body": "*", }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:testIamPermissions", + "body": "*", + }, { "method": "post", "uri": "/v1/{resource=projects/*/instances/*/databases/*/databaseRoles/*}:testIamPermissions", @@ -2738,6 +3258,106 @@ def __call__( resp = self._interceptor.post_update_backup(resp) return resp + class _UpdateBackupSchedule(DatabaseAdminRestStub): + def __hash__(self): + return hash("UpdateBackupSchedule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsad_backup_schedule.UpdateBackupScheduleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Call the update backup schedule method over HTTP. + + Args: + request (~.gsad_backup_schedule.UpdateBackupScheduleRequest): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsad_backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}", + "body": "backup_schedule", + }, + ] + request, metadata = self._interceptor.pre_update_backup_schedule( + request, metadata + ) + pb_request = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsad_backup_schedule.BackupSchedule() + pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_schedule(resp) + return resp + class _UpdateDatabase(DatabaseAdminRestStub): def __hash__(self): return hash("UpdateDatabase") @@ -2963,6 +3583,17 @@ def create_backup( # In C++ this would require a dynamic_cast return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def create_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + @property def create_database( self, @@ -2979,6 +3610,14 @@ def delete_backup(self) -> Callable[[backup.DeleteBackupRequest], empty_pb2.Empt # In C++ this would require a dynamic_cast return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup_schedule( + self, + ) -> Callable[[backup_schedule.DeleteBackupScheduleRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + @property def drop_database( self, @@ -2993,6 +3632,16 @@ def get_backup(self) -> Callable[[backup.GetBackupRequest], backup.Backup]: # In C++ this would require a dynamic_cast return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def get_backup_schedule( + self, + ) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], backup_schedule.BackupSchedule + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + @property def get_database( self, @@ -3040,6 +3689,17 @@ def list_backups( # In C++ this would require a dynamic_cast return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_schedules( + self, + ) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + backup_schedule.ListBackupSchedulesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore + @property def list_database_operations( self, @@ -3110,6 +3770,17 @@ def update_backup( # In C++ this would require a dynamic_cast return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def update_backup_schedule( + self, + ) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + @property def update_database( self, diff --git a/google/cloud/spanner_admin_database_v1/types/__init__.py b/google/cloud/spanner_admin_database_v1/types/__init__.py index a53acf5648..2743a7be51 100644 --- a/google/cloud/spanner_admin_database_v1/types/__init__.py +++ b/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -23,6 +23,7 @@ CreateBackupMetadata, CreateBackupRequest, DeleteBackupRequest, + FullBackupSpec, GetBackupRequest, ListBackupOperationsRequest, ListBackupOperationsResponse, @@ -30,6 +31,17 @@ ListBackupsResponse, UpdateBackupRequest, ) +from .backup_schedule import ( + BackupSchedule, + BackupScheduleSpec, + CreateBackupScheduleRequest, + CrontabSpec, + DeleteBackupScheduleRequest, + GetBackupScheduleRequest, + ListBackupSchedulesRequest, + ListBackupSchedulesResponse, + UpdateBackupScheduleRequest, +) from .common import ( EncryptionConfig, EncryptionInfo, @@ -74,12 +86,22 @@ "CreateBackupMetadata", "CreateBackupRequest", "DeleteBackupRequest", + "FullBackupSpec", "GetBackupRequest", "ListBackupOperationsRequest", "ListBackupOperationsResponse", "ListBackupsRequest", "ListBackupsResponse", "UpdateBackupRequest", + "BackupSchedule", + "BackupScheduleSpec", + "CreateBackupScheduleRequest", + "CrontabSpec", + "DeleteBackupScheduleRequest", + "GetBackupScheduleRequest", + "ListBackupSchedulesRequest", + "ListBackupSchedulesResponse", + "UpdateBackupScheduleRequest", "EncryptionConfig", "EncryptionInfo", "OperationProgress", diff --git a/google/cloud/spanner_admin_database_v1/types/backup.py b/google/cloud/spanner_admin_database_v1/types/backup.py index 2805eb8f7c..156f16f114 100644 --- a/google/cloud/spanner_admin_database_v1/types/backup.py +++ b/google/cloud/spanner_admin_database_v1/types/backup.py @@ -43,6 +43,7 @@ "BackupInfo", "CreateBackupEncryptionConfig", "CopyBackupEncryptionConfig", + "FullBackupSpec", }, ) @@ -141,6 +142,20 @@ class Backup(proto.Message): UpdateBackup, CopyBackup. When updating or copying an existing backup, the expiration time specified must be less than ``Backup.max_expire_time``. + backup_schedules (MutableSequence[str]): + Output only. List of backup schedule URIs + that are associated with creating this backup. + This is only applicable for scheduled backups, + and is empty for on-demand backups. + + To optimize for storage, whenever possible, + multiple schedules are collapsed together to + create one backup. In such cases, this field + captures the list of all backup schedule URIs + that are associated with creating this backup. + If collapsing is not done, then this field + captures the single backup schedule URI + associated with creating this backup. """ class State(proto.Enum): @@ -221,6 +236,10 @@ class State(proto.Enum): number=12, message=timestamp_pb2.Timestamp, ) + backup_schedules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) class CreateBackupRequest(proto.Message): @@ -972,4 +991,12 @@ class EncryptionType(proto.Enum): ) +class FullBackupSpec(proto.Message): + r"""The specification for full backups. + A full backup stores the entire contents of the database at a + given version time. 
+ + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/google/cloud/spanner_admin_database_v1/types/backup_schedule.py new file mode 100644 index 0000000000..14ea180bc3 --- /dev/null +++ b/google/cloud/spanner_admin_database_v1/types/backup_schedule.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.spanner.admin.database.v1", + manifest={ + "BackupScheduleSpec", + "BackupSchedule", + "CrontabSpec", + "CreateBackupScheduleRequest", + "GetBackupScheduleRequest", + "DeleteBackupScheduleRequest", + "ListBackupSchedulesRequest", + "ListBackupSchedulesResponse", + "UpdateBackupScheduleRequest", + }, +) + + +class BackupScheduleSpec(proto.Message): + r"""Defines specifications of the backup schedule. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cron_spec (google.cloud.spanner_admin_database_v1.types.CrontabSpec): + Cron style schedule specification. + + This field is a member of `oneof`_ ``schedule_spec``. + """ + + cron_spec: "CrontabSpec" = proto.Field( + proto.MESSAGE, + number=1, + oneof="schedule_spec", + message="CrontabSpec", + ) + + +class BackupSchedule(proto.Message): + r"""BackupSchedule expresses the automated backup creation + specification for a Spanner database. + Next ID: 10 + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. Output only for the + [CreateBackupSchedule][DatabaseAdmin.CreateBackupSchededule] + operation. Required for the + [UpdateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule] + operation. A globally unique identifier for the backup + schedule which cannot be changed. Values are of the form + ``projects//instances//databases//backupSchedules/[a-z][a-z0-9_\-]*[a-z0-9]`` + The final segment of the name must be between 2 and 60 + characters in length. + spec (google.cloud.spanner_admin_database_v1.types.BackupScheduleSpec): + Optional. The schedule specification based on + which the backup creations are triggered. + retention_duration (google.protobuf.duration_pb2.Duration): + Optional. The retention duration of a backup + that must be at least 6 hours and at most 366 + days. The backup is eligible to be automatically + deleted once the retention period has elapsed. + encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): + Optional. 
The encryption configuration that
+            will be used to encrypt the backup. If this
+            field is not specified, the backup will use the
+            same encryption configuration as the database.
+        full_backup_spec (google.cloud.spanner_admin_database_v1.types.FullBackupSpec):
+            The schedule creates only full backups.
+
+            This field is a member of `oneof`_ ``backup_type_spec``.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp at which the
+            schedule was last updated. If the schedule has
+            never been updated, this field contains the
+            timestamp when the schedule was first created.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    spec: "BackupScheduleSpec" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="BackupScheduleSpec",
+    )
+    retention_duration: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=duration_pb2.Duration,
+    )
+    encryption_config: backup.CreateBackupEncryptionConfig = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=backup.CreateBackupEncryptionConfig,
+    )
+    full_backup_spec: backup.FullBackupSpec = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        oneof="backup_type_spec",
+        message=backup.FullBackupSpec,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class CrontabSpec(proto.Message):
+    r"""CrontabSpec can be used to specify the version time and
+    frequency at which the backup should be created.
+
+    Attributes:
+        text (str):
+            Required. Textual representation of the crontab. User can
+            customize the backup frequency and the backup version time
+            using the cron expression. The version time must be in UTC
+            timezone.
+
+            The backup will contain an externally consistent copy of the
+            database at the version time. Allowed frequencies are 12
+            hour, 1 day, 1 week and 1 month. Examples of valid cron
+            specifications:
+
+            -  ``0 2/12 * * *`` : every 12 hours at (2, 14) hours past
+               midnight in UTC.
+            -  ``0 2,14 * * *`` : every 12 hours at (2,14) hours past
+               midnight in UTC.
+            -  ``0 2 * * *`` : once a day at 2 past midnight in UTC.
+            -  ``0 2 * * 0`` : once a week every Sunday at 2 past
+               midnight in UTC.
+            -  ``0 2 8 * *`` : once a month on 8th day at 2 past
+               midnight in UTC.
+        time_zone (str):
+            Output only. The time zone of the times in
+            ``CrontabSpec.text``. Currently only UTC is supported.
+        creation_window (google.protobuf.duration_pb2.Duration):
+            Output only. Schedule backups will contain an externally
+            consistent copy of the database at the version time
+            specified in ``schedule_spec.cron_spec``. However, Spanner
+            may not initiate the creation of the scheduled backups at
+            that version time. Spanner will initiate the creation of
+            scheduled backups within the time window bounded by the
+            version_time specified in ``schedule_spec.cron_spec`` and
+            version_time + ``creation_window``.
+    """
+
+    text: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    time_zone: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    creation_window: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=duration_pb2.Duration,
+    )
+
+
+class CreateBackupScheduleRequest(proto.Message):
+    r"""The request for
+    [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule].
+
+    Attributes:
+        parent (str):
+            Required. The name of the database that this
+            backup schedule applies to.
+        backup_schedule_id (str):
+            Required. The Id to use for the backup schedule.
The + ``backup_schedule_id`` appended to ``parent`` forms the full + backup schedule name of the form + ``projects//instances//databases//backupSchedules/``. + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_schedule_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_schedule: "BackupSchedule" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupSchedule", + ) + + +class GetBackupScheduleRequest(proto.Message): + r"""The request for + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. + + Attributes: + name (str): + Required. The name of the schedule to retrieve. Values are + of the form + ``projects//instances//databases//backupSchedules/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupScheduleRequest(proto.Message): + r"""The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + + Attributes: + name (str): + Required. The name of the schedule to delete. Values are of + the form + ``projects//instances//databases//backupSchedules/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupSchedulesRequest(proto.Message): + r"""The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Attributes: + parent (str): + Required. Database is the parent resource + whose backup schedules should be listed. Values + are of the form + projects//instances//databases/ + page_size (int): + Optional. Number of backup schedules to be + returned in the response. If 0 or less, defaults + to the server's maximum allowed page size. + page_token (str): + Optional. If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupSchedulesResponse.next_page_token] + from a previous + [ListBackupSchedulesResponse][google.spanner.admin.database.v1.ListBackupSchedulesResponse] + to the same ``parent``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupSchedulesResponse(proto.Message): + r"""The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Attributes: + backup_schedules (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupSchedule]): + The list of backup schedules for a database. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules] + call to fetch more of the schedules. + """ + + @property + def raw_page(self): + return self + + backup_schedules: MutableSequence["BackupSchedule"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupSchedule", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateBackupScheduleRequest(proto.Message): + r"""The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + + Attributes: + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to update. 
+ ``backup_schedule.name``, and the fields to be updated as + specified by ``update_mask`` are required. Other fields are + ignored. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + the BackupSchedule resource should be updated. + This mask is relative to the BackupSchedule + resource, not to the request message. The field + mask must always be specified; this prevents any + future fields from being erased accidentally. + """ + + backup_schedule: "BackupSchedule" = proto.Field( + proto.MESSAGE, + number=1, + message="BackupSchedule", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index 08380012aa..7bae63ff52 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -38,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py index c32f583282..ee70ea889a 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index 5fb9f55688..347688dedb 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -150,7 +150,8 @@ def __init__( if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index 99ac7f443a..b21d57f4fa 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -197,7 +197,8 @@ def __init__( if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index d1c5827f47..cb1981d8b2 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -40,6 +40,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py index 73fdbcffa2..14c8e8d02f 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -89,6 +89,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -101,7 +103,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index 9293258ea4..a2afa32174 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -129,7 +129,8 @@ def __init__( if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index 25b5ae1866..3b805cba30 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -176,7 +176,8 @@ def __init__( if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
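The transport changes above replace the old `credentials = False` sentinel with an explicit `_ignore_credentials` flag, so the base transport's `__init__` skips the `google.auth.default()` lookup whenever a ready-made channel is supplied. A sketch of the call path this protects (emulator-style endpoint assumed; the host is a placeholder):

# Sketch: supplying an explicit channel now skips the ADC lookup entirely
# instead of passing a bogus credentials=False sentinel to the base class.
import grpc

from google.cloud.spanner_v1.services.spanner.transports.grpc import (
    SpannerGrpcTransport,
)

channel = grpc.insecure_channel("localhost:9010")  # e.g. a local Spanner emulator
transport = SpannerGrpcTransport(channel=channel)  # no credentials required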
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py index 465a39fbdb..1546f66c83 100644 --- a/google/cloud/spanner_v1/types/spanner.py +++ b/google/cloud/spanner_v1/types/spanner.py @@ -1320,8 +1320,100 @@ class ReadRequest(proto.Message): If the field is set to ``true`` but the request does not set ``partition_token``, the API returns an ``INVALID_ARGUMENT`` error. + order_by (google.cloud.spanner_v1.types.ReadRequest.OrderBy): + Optional. Order for the returned rows. + + By default, Spanner will return result rows in primary key + order except for PartitionRead requests. For applications + that do not require rows to be returned in primary key + (``ORDER_BY_PRIMARY_KEY``) order, setting + ``ORDER_BY_NO_ORDER`` option allows Spanner to optimize row + retrieval, resulting in lower latencies in certain cases + (e.g. bulk point lookups). + lock_hint (google.cloud.spanner_v1.types.ReadRequest.LockHint): + Optional. Lock Hint for the request, it can + only be used with read-write transactions. """ + class OrderBy(proto.Enum): + r"""An option to control the order in which rows are returned + from a read. + + Values: + ORDER_BY_UNSPECIFIED (0): + Default value. + + ORDER_BY_UNSPECIFIED is equivalent to ORDER_BY_PRIMARY_KEY. + ORDER_BY_PRIMARY_KEY (1): + Read rows are returned in primary key order. + + In the event that this option is used in conjunction with + the ``partition_token`` field, the API will return an + ``INVALID_ARGUMENT`` error. + ORDER_BY_NO_ORDER (2): + Read rows are returned in any order. + """ + ORDER_BY_UNSPECIFIED = 0 + ORDER_BY_PRIMARY_KEY = 1 + ORDER_BY_NO_ORDER = 2 + + class LockHint(proto.Enum): + r"""A lock hint mechanism for reads done within a transaction. + + Values: + LOCK_HINT_UNSPECIFIED (0): + Default value. + + LOCK_HINT_UNSPECIFIED is equivalent to LOCK_HINT_SHARED. + LOCK_HINT_SHARED (1): + Acquire shared locks. + + By default when you perform a read as part of a read-write + transaction, Spanner acquires shared read locks, which + allows other reads to still access the data until your + transaction is ready to commit. When your transaction is + committing and writes are being applied, the transaction + attempts to upgrade to an exclusive lock for any data you + are writing. For more information about locks, see `Lock + modes `__. + LOCK_HINT_EXCLUSIVE (2): + Acquire exclusive locks. + + Requesting exclusive locks is beneficial if you observe high + write contention, which means you notice that multiple + transactions are concurrently trying to read and write to + the same data, resulting in a large number of aborts. This + problem occurs when two transactions initially acquire + shared locks and then both try to upgrade to exclusive locks + at the same time. In this situation both transactions are + waiting for the other to give up their lock, resulting in a + deadlocked situation. Spanner is able to detect this + occurring and force one of the transactions to abort. + However, this is a slow and expensive operation and results + in lower performance. In this case it makes sense to acquire + exclusive locks at the start of the transaction because then + when multiple transactions try to act on the same data, they + automatically get serialized. Each transaction waits its + turn to acquire the lock and avoids getting into deadlock + situations. + + Because the exclusive lock hint is just a hint, it should + not be considered equivalent to a mutex. 
In other words, you + should not use Spanner exclusive locks as a mutual exclusion + mechanism for the execution of code outside of Spanner. + + **Note:** Request exclusive locks judiciously because they + block others from reading that data for the entire + transaction, rather than just when the writes are being + performed. Unless you observe high write contention, you + should use the default of shared read locks so you don't + prematurely block other clients from reading the data that + you're writing to. + """ + LOCK_HINT_UNSPECIFIED = 0 + LOCK_HINT_SHARED = 1 + LOCK_HINT_EXCLUSIVE = 2 + session: str = proto.Field( proto.STRING, number=1, @@ -1374,6 +1466,16 @@ class ReadRequest(proto.Message): proto.BOOL, number=15, ) + order_by: OrderBy = proto.Field( + proto.ENUM, + number=16, + enum=OrderBy, + ) + lock_hint: LockHint = proto.Field( + proto.ENUM, + number=17, + enum=LockHint, + ) class BeginTransactionRequest(proto.Message): diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index 1593b7449a..86a6b4fa78 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.47.0" + "version": "0.1.0" }, "snippets": [ { @@ -196,6 +196,183 @@ ], "title": "spanner_v1_generated_database_admin_copy_backup_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "backup_schedule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "spanner_v1_generated_database_admin_create_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_schedule_async.py" 
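The new `order_by` and `lock_hint` fields documented above are plain enums on `ReadRequest`; a sketch of a request that opts out of primary-key ordering and asks for exclusive locks up front (resource names are placeholders):

# Sketch: constructing a ReadRequest with the new fields defined above.
from google.cloud.spanner_v1.types import KeySet, ReadRequest

read_request = ReadRequest(
    session="projects/p/instances/i/databases/d/sessions/s",  # illustrative name
    table="Albums",
    columns=["AlbumId", "Title"],
    key_set=KeySet(all_=True),
    order_by=ReadRequest.OrderBy.ORDER_BY_NO_ORDER,      # allow unordered rows
    lock_hint=ReadRequest.LockHint.LOCK_HINT_EXCLUSIVE,  # take exclusive locks up front
)

The CreateBackupSchedule samples catalogued in this metadata reduce to a single client call combining the new message types; a hedged sketch with placeholder resource names, assuming Application Default Credentials:

# Sketch: the call shape exercised by the generated samples below.
from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient
from google.cloud.spanner_admin_database_v1.types import backup, backup_schedule
from google.protobuf import duration_pb2

client = DatabaseAdminClient()
schedule = client.create_backup_schedule(
    parent="projects/p/instances/i/databases/d",
    backup_schedule_id="daily",
    backup_schedule=backup_schedule.BackupSchedule(
        spec=backup_schedule.BackupScheduleSpec(
            cron_spec=backup_schedule.CrontabSpec(text="0 2 * * *"),  # daily at 02:00 UTC
        ),
        retention_duration=duration_pb2.Duration(seconds=7 * 24 * 3600),  # keep 7 days
        full_backup_spec=backup.FullBackupSpec(),  # full (not incremental) backups
    ),
)
print(schedule.name)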
+ }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "backup_schedule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py" + }, { "canonical": true, "clientMethod": { @@ -507,15 +684,327 @@ "file": "spanner_v1_generated_database_admin_create_database_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup" + }, + "description": 
"Sample for DeleteBackup", + "file": "spanner_v1_generated_database_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "spanner_v1_generated_database_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -525,22 +1014,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_create_database_sync.py" + "title": "spanner_v1_generated_database_admin_delete_backup_sync.py" }, { "canonical": true, @@ -550,22 +1037,22 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", "shortName": "DatabaseAdminAsyncClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.drop_database", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "DeleteBackup" + "shortName": "DropDatabase" }, "parameters": [ { "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" }, { - "name": "name", + "name": "database", "type": "str" }, { @@ -581,13 +1068,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_backup" + "shortName": "drop_database" }, - "description": "Sample for DeleteBackup", - "file": 
"spanner_v1_generated_database_admin_delete_backup_async.py", + "description": "Sample for DropDatabase", + "file": "spanner_v1_generated_database_admin_drop_database_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_async", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_async", "segments": [ { "end": 49, @@ -618,7 +1105,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_delete_backup_async.py" + "title": "spanner_v1_generated_database_admin_drop_database_async.py" }, { "canonical": true, @@ -627,22 +1114,22 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", "shortName": "DatabaseAdminClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.drop_database", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "DeleteBackup" + "shortName": "DropDatabase" }, "parameters": [ { "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" }, { - "name": "name", + "name": "database", "type": "str" }, { @@ -658,13 +1145,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_backup" + "shortName": "drop_database" }, - "description": "Sample for DeleteBackup", - "file": "spanner_v1_generated_database_admin_delete_backup_sync.py", + "description": "Sample for DropDatabase", + "file": "spanner_v1_generated_database_admin_drop_database_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_sync", "segments": [ { "end": 49, @@ -695,7 +1182,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_delete_backup_sync.py" + "title": "spanner_v1_generated_database_admin_drop_database_sync.py" }, { "canonical": true, @@ -705,22 +1192,22 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", "shortName": "DatabaseAdminAsyncClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.drop_database", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup_schedule", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "DropDatabase" + "shortName": "GetBackupSchedule" }, "parameters": [ { "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" + "type": "google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" }, { - "name": "database", + "name": "name", "type": "str" }, { @@ -736,21 +1223,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "drop_database" + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" }, - "description": "Sample for DropDatabase", - "file": 
"spanner_v1_generated_database_admin_drop_database_async.py", + "description": "Sample for GetBackupSchedule", + "file": "spanner_v1_generated_database_admin_get_backup_schedule_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_async", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -765,15 +1253,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_drop_database_async.py" + "title": "spanner_v1_generated_database_admin_get_backup_schedule_async.py" }, { "canonical": true, @@ -782,22 +1272,22 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", "shortName": "DatabaseAdminClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.drop_database", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup_schedule", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "DropDatabase" + "shortName": "GetBackupSchedule" }, "parameters": [ { "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" + "type": "google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" }, { - "name": "database", + "name": "name", "type": "str" }, { @@ -813,21 +1303,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "drop_database" + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" }, - "description": "Sample for DropDatabase", - "file": "spanner_v1_generated_database_admin_drop_database_sync.py", + "description": "Sample for GetBackupSchedule", + "file": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_sync", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -842,15 +1333,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_drop_database_sync.py" + "title": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py" }, { "canonical": true, @@ -1313,27 +1806,188 @@ "type": "SHORT" }, { - "end": 40, - "start": 38, + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + 
"shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_database_admin_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_database_admin_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 41, + "end": 46, + "start": 42, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_get_database_sync.py" + "title": "spanner_v1_generated_database_admin_get_iam_policy_sync.py" }, { "canonical": true, @@ -1343,22 +1997,22 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", "shortName": "DatabaseAdminAsyncClient" }, - "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_iam_policy", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_operations", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "GetIamPolicy" + "shortName": "ListBackupOperations" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" }, { - "name": "resource", + "name": "parent", "type": "str" }, { @@ -1374,14 +2028,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager", + "shortName": "list_backup_operations" }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_database_admin_get_iam_policy_async.py", + "description": "Sample for ListBackupOperations", + "file": "spanner_v1_generated_database_admin_list_backup_operations_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async", "segments": [ { "end": 52, @@ -1394,27 +2048,27 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 42, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_get_iam_policy_async.py" + "title": "spanner_v1_generated_database_admin_list_backup_operations_async.py" }, { "canonical": true, @@ -1423,22 +2077,22 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", "shortName": "DatabaseAdminClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_iam_policy", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_operations", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "GetIamPolicy" + "shortName": "ListBackupOperations" }, "parameters": [ { "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" }, { - "name": "resource", + "name": "parent", "type": "str" }, { @@ -1454,14 +2108,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager", + "shortName": "list_backup_operations" }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_database_admin_get_iam_policy_sync.py", + "description": "Sample for ListBackupOperations", + 
"file": "spanner_v1_generated_database_admin_list_backup_operations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync", "segments": [ { "end": 52, @@ -1474,27 +2128,27 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 46, - "start": 42, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_get_iam_policy_sync.py" + "title": "spanner_v1_generated_database_admin_list_backup_operations_sync.py" }, { "canonical": true, @@ -1504,19 +2158,19 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", "shortName": "DatabaseAdminAsyncClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_operations", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_schedules", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": "DatabaseAdmin" }, - "shortName": "ListBackupOperations" + "shortName": "ListBackupSchedules" }, "parameters": [ { "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" }, { "name": "parent", @@ -1535,14 +2189,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager", - "shortName": "list_backup_operations" + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager", + "shortName": "list_backup_schedules" }, - "description": "Sample for ListBackupOperations", - "file": "spanner_v1_generated_database_admin_list_backup_operations_async.py", + "description": "Sample for ListBackupSchedules", + "file": "spanner_v1_generated_database_admin_list_backup_schedules_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async", "segments": [ { "end": 52, @@ -1575,7 +2229,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_list_backup_operations_async.py" + "title": "spanner_v1_generated_database_admin_list_backup_schedules_async.py" }, { "canonical": true, @@ -1584,19 +2238,19 @@ "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", "shortName": "DatabaseAdminClient" }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_operations", + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_schedules", "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", "service": { "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", "shortName": 
"DatabaseAdmin" }, - "shortName": "ListBackupOperations" + "shortName": "ListBackupSchedules" }, "parameters": [ { "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" }, { "name": "parent", @@ -1615,14 +2269,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager", - "shortName": "list_backup_operations" + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager", + "shortName": "list_backup_schedules" }, - "description": "Sample for ListBackupOperations", - "file": "spanner_v1_generated_database_admin_list_backup_operations_sync.py", + "description": "Sample for ListBackupSchedules", + "file": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync", "segments": [ { "end": 52, @@ -1655,7 +2309,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "spanner_v1_generated_database_admin_list_backup_operations_sync.py" + "title": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py" }, { "canonical": true, @@ -2808,6 +3462,175 @@ ], "title": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "spanner_v1_generated_database_admin_update_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 7c40f33740..0811b451cb 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.47.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index 49b8b08480..4384d19e2a 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.47.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py new file mode 100644 index 0000000000..e9a386c6bf --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py new file mode 100644 index 0000000000..e4ae46f99c --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py new file mode 100644 index 0000000000..27aa572802 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py new file mode 100644 index 0000000000..47ee67b992 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + client.delete_backup_schedule(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py new file mode 100644 index 0000000000..98d8375bfe --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py new file mode 100644 index 0000000000..c061c92be2 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py new file mode 100644 index 0000000000..b6b8517ff6 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupSchedules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_list_backup_schedules(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_schedules(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py new file mode 100644 index 0000000000..64c4872f35 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupSchedules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backup_schedules(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_schedules(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py new file mode 100644 index 0000000000..767ae35969 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] diff --git a/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py new file mode 100644 index 0000000000..43e2d7ff79 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] diff --git a/scripts/fixup_spanner_admin_database_v1_keywords.py b/scripts/fixup_spanner_admin_database_v1_keywords.py index c0ae624bb9..0c7fea2c42 100644 --- a/scripts/fixup_spanner_admin_database_v1_keywords.py +++ b/scripts/fixup_spanner_admin_database_v1_keywords.py @@ -41,15 +41,19 @@ class spanner_admin_databaseCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'copy_backup': ('parent', 'backup_id', 'source_backup', 'expire_time', 'encryption_config', ), 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), + 'create_backup_schedule': ('parent', 'backup_schedule_id', 'backup_schedule', ), 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', 'database_dialect', 'proto_descriptors', ), 'delete_backup': ('name', ), + 'delete_backup_schedule': ('name', ), 'drop_database': ('database', ), 'get_backup': ('name', ), + 'get_backup_schedule': ('name', ), 'get_database': ('name', ), 'get_database_ddl': ('database', ), 'get_iam_policy': ('resource', 'options', ), 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_backup_schedules': ('parent', 'page_size', 'page_token', ), 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), 'list_database_roles': ('parent', 'page_size', 'page_token', ), 'list_databases': ('parent', 'page_size', 'page_token', ), @@ -57,6 +61,7 @@ class spanner_admin_databaseCallTransformer(cst.CSTTransformer): 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_backup': ('backup', 'update_mask', ), + 'update_backup_schedule': ('backup_schedule', 'update_mask', ), 'update_database': ('database', 'update_mask', ), 'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', ), } diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py index da54fd7fa1..7177331ab7 100644 --- a/scripts/fixup_spanner_v1_keywords.py +++ b/scripts/fixup_spanner_v1_keywords.py @@ -52,9 +52,9 @@ class spannerCallTransformer(cst.CSTTransformer): 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', ), + 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), 
'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', ), + 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/setup.py b/setup.py index 95ff029bc6..98b1a61748 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "sqlparse >= 0.4.4", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.20.2,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-interceptor >= 0.15.4", ] extras = { diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index 7f59b102e9..c9b63a9109 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -59,6 +59,10 @@ from google.cloud.spanner_admin_database_v1.services.database_admin import transports from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as gsad_backup_schedule, +) from google.cloud.spanner_admin_database_v1.types import common from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -68,6 +72,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1306,12 +1311,7 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_databases ] = mock_object @@ -1549,13 +1549,13 @@ def test_list_databases_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_databases(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1867,12 +1867,7 @@ async def test_create_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
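+        # mock.AsyncMock is awaitable when called, so no custom __await__
+        # shim is needed to stand in for the wrapped async RPC.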
client._client._transport._wrapped_methods[ client._client._transport.create_database ] = mock_object @@ -2266,12 +2261,7 @@ async def test_get_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_database ] = mock_object @@ -2643,12 +2633,7 @@ async def test_update_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_database ] = mock_object @@ -3037,12 +3022,7 @@ async def test_update_database_ddl_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_database_ddl ] = mock_object @@ -3421,12 +3401,7 @@ async def test_drop_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.drop_database ] = mock_object @@ -3787,12 +3762,7 @@ async def test_get_database_ddl_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_database_ddl ] = mock_object @@ -4162,12 +4132,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy ] = mock_object @@ -4550,12 +4515,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy ] = mock_object @@ -4946,12 +4906,7 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions ] = mock_object @@ -5356,12 +5311,7 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return 
iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_backup ] = mock_object @@ -5749,12 +5699,7 @@ async def test_copy_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.copy_backup ] = mock_object @@ -6013,6 +5958,7 @@ def test_get_backup(request_type, transport: str = "grpc"): referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) response = client.get_backup(request) @@ -6031,6 +5977,7 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] + assert response.backup_schedules == ["backup_schedules_value"] def test_get_backup_empty_call(): @@ -6136,6 +6083,7 @@ async def test_get_backup_empty_call_async(): referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) ) response = await client.get_backup() @@ -6165,12 +6113,7 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_backup ] = mock_object @@ -6213,6 +6156,7 @@ async def test_get_backup_async( referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) ) response = await client.get_backup(request) @@ -6232,6 +6176,7 @@ async def test_get_backup_async( assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] + assert response.backup_schedules == ["backup_schedules_value"] @pytest.mark.asyncio @@ -6406,6 +6351,7 @@ def test_update_backup(request_type, transport: str = "grpc"): referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) response = client.update_backup(request) @@ -6424,6 +6370,7 @@ def test_update_backup(request_type, transport: str = "grpc"): assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] + assert response.backup_schedules == ["backup_schedules_value"] def test_update_backup_empty_call(): @@ -6525,6 +6472,7 @@ async def test_update_backup_empty_call_async(): 
referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) ) response = await client.update_backup() @@ -6556,12 +6504,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_backup ] = mock_object @@ -6604,6 +6547,7 @@ async def test_update_backup_async( referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) ) response = await client.update_backup(request) @@ -6623,6 +6567,7 @@ async def test_update_backup_async( assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] + assert response.backup_schedules == ["backup_schedules_value"] @pytest.mark.asyncio @@ -6936,12 +6881,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup ] = mock_object @@ -7300,12 +7240,7 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_backups ] = mock_object @@ -7542,13 +7477,13 @@ def test_list_backups_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backups(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7864,12 +7799,7 @@ async def test_restore_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.restore_database ] = mock_object @@ -8268,12 +8198,7 @@ async def test_list_database_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_database_operations ] = mock_object @@ -8523,13 +8448,13 @@ def test_list_database_operations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - 
metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_database_operations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8863,12 +8788,7 @@ async def test_list_backup_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_backup_operations ] = mock_object @@ -9117,13 +9037,13 @@ def test_list_backup_operations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_backup_operations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9454,12 +9374,7 @@ async def test_list_database_roles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_database_roles ] = mock_object @@ -9709,13 +9624,13 @@ def test_list_database_roles_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_database_roles(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9873,50 +9788,101 @@ async def test_list_database_roles_async_pages(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.ListDatabasesRequest, + gsad_backup_schedule.CreateBackupScheduleRequest, dict, ], ) -def test_list_databases_rest(request_type): +def test_create_backup_schedule(request_type, transport: str = "grpc"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse( - next_page_token="next_page_token_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
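+        # The patched __call__ returns this message synchronously, so the
+        # client surface is exercised without a real Spanner backend.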
+ call.return_value = gsad_backup_schedule.BackupSchedule( + name="name_value", ) + response = client.create_backup_schedule(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_databases(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.CreateBackupScheduleRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == "name_value" -def test_list_databases_rest_use_cached_wrapped_rpc(): +def test_create_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest() + + +def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup_schedule.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + +def test_create_backup_schedule_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -9924,128 +9890,3951 @@ def test_list_databases_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods + assert ( + client._transport.create_backup_schedule + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.create_backup_schedule + ] = mock_rpc request = {} - client.list_databases(request) + client.create_backup_schedule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_databases(request) + client.create_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_databases_rest_required_fields( - request_type=spanner_database_admin.ListDatabasesRequest, -): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
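+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an
+        # awaitable, mimicking a real async unary-unary call object.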
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule( + name="name_value", + ) + ) + response = await client.create_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_create_backup_schedule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.create_backup_schedule + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_schedule + ] = mock_object - client = DatabaseAdminClient( + request = {} + await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_schedule_async( + transport: str = "grpc_asyncio", + request_type=gsad_backup_schedule.CreateBackupScheduleRequest, +): + client = DatabaseAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule( + name="name_value", + ) + ) + response = await client.create_backup_schedule(request) - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.CreateBackupScheduleRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == "name_value" - response = client.list_databases(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_backup_schedule_async_from_dict(): + await test_create_backup_schedule_async(request_type=dict) -def test_list_databases_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_databases._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.CreateBackupScheduleRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_schedule_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
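+    # The client mirrors `parent` into the x-goog-request-params metadata
+    # entry asserted on below.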
+ request = gsad_backup_schedule.CreateBackupScheduleRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule() + ) + await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_schedule_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_schedule( + parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + backup_schedule_id="backup_schedule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_schedule + mock_val = gsad_backup_schedule.BackupSchedule(name="name_value") + assert arg == mock_val + arg = args[0].backup_schedule_id + mock_val = "backup_schedule_id_value" + assert arg == mock_val + + +def test_create_backup_schedule_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), + parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + backup_schedule_id="backup_schedule_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_schedule_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_schedule( + parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + backup_schedule_id="backup_schedule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
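+    # Each flattened keyword argument should land on the matching field of
+    # the captured request proto.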
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_schedule + mock_val = gsad_backup_schedule.BackupSchedule(name="name_value") + assert arg == mock_val + arg = args[0].backup_schedule_id + mock_val = "backup_schedule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), + parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + backup_schedule_id="backup_schedule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_schedule.GetBackupScheduleRequest, + dict, + ], +) +def test_get_backup_schedule(request_type, transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_schedule.BackupSchedule( + name="name_value", + ) + response = client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_schedule.BackupSchedule) + assert response.name == "name_value" + + +def test_get_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.GetBackupScheduleRequest() + + +def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.GetBackupScheduleRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
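+    # Patching at the transport level captures the request the client
+    # surface actually constructs from the populated fields.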
+ with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.GetBackupScheduleRequest( + name="name_value", + ) + + +def test_get_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_schedule in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_schedule + ] = mock_rpc + request = {} + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.BackupSchedule( + name="name_value", + ) + ) + response = await client.get_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.GetBackupScheduleRequest() + + +@pytest.mark.asyncio +async def test_get_backup_schedule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_schedule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_schedule + ] = mock_object + + request = {} + await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
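+        # The repeat call below must reuse this cached wrapper rather than
+        # wrap the method again.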
+ assert mock_object.call_count == 1 + + await client.get_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_schedule_async( + transport: str = "grpc_asyncio", + request_type=backup_schedule.GetBackupScheduleRequest, +): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.BackupSchedule( + name="name_value", + ) + ) + response = await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup_schedule.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_schedule.BackupSchedule) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_backup_schedule_async_from_dict(): + await test_get_backup_schedule_async(request_type=dict) + + +def test_get_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.GetBackupScheduleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + call.return_value = backup_schedule.BackupSchedule() + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_schedule_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.GetBackupScheduleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.BackupSchedule() + ) + await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
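+    # Routing headers travel in the call's `metadata` keyword argument.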
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_schedule_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_schedule( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_schedule_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_schedule.BackupSchedule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.BackupSchedule() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_schedule( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsad_backup_schedule.UpdateBackupScheduleRequest, + dict, + ], +) +def test_update_backup_schedule(request_type, transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
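+        # Only `name` is populated; the remaining fields keep their proto3
+        # defaults.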
+ call.return_value = gsad_backup_schedule.BackupSchedule( + name="name_value", + ) + response = client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == "name_value" + + +def test_update_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest() + + +def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest() + + +def test_update_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_schedule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_schedule + ] = mock_rpc + request = {} + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
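+        # The dict request is coerced to UpdateBackupScheduleRequest before it
+        # reaches the mocked wrapper, which records exactly one call.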
+ assert mock_rpc.call_count == 1 + + client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule( + name="name_value", + ) + ) + response = await client.update_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest() + + +@pytest.mark.asyncio +async def test_update_backup_schedule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup_schedule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup_schedule + ] = mock_object + + request = {} + await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_schedule_async( + transport: str = "grpc_asyncio", + request_type=gsad_backup_schedule.UpdateBackupScheduleRequest, +): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule( + name="name_value", + ) + ) + response = await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
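+    # Awaiting the FakeUnaryUnaryCall unwraps the designated message.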
+ assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_backup_schedule_async_from_dict(): + await test_update_backup_schedule_async(request_type=dict) + + +def test_update_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + + request.backup_schedule.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_schedule.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_schedule_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + + request.backup_schedule.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsad_backup_schedule.BackupSchedule() + ) + await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_schedule.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_schedule_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_schedule( + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
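+ # The flattened keyword arguments should have been folded into a single
+ # UpdateBackupScheduleRequest before the transport was invoked.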
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].backup_schedule
+ mock_val = gsad_backup_schedule.BackupSchedule(name="name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+def test_update_backup_schedule_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_backup_schedule(
+ gsad_backup_schedule.UpdateBackupScheduleRequest(),
+ backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.asyncio
+async def test_update_backup_schedule_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_backup_schedule), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ gsad_backup_schedule.BackupSchedule()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_backup_schedule(
+ backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].backup_schedule
+ mock_val = gsad_backup_schedule.BackupSchedule(name="name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_backup_schedule_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_backup_schedule(
+ gsad_backup_schedule.UpdateBackupScheduleRequest(),
+ backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ backup_schedule.DeleteBackupScheduleRequest,
+ dict,
+ ],
+)
+def test_delete_backup_schedule(request_type, transport: str = "grpc"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_backup_schedule(request)
+
+ # Establish that the underlying gRPC stub method was called.
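+ # A unary RPC returning google.protobuf.Empty surfaces as None, which the
+ # final assertion below relies on.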
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_backup_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.DeleteBackupScheduleRequest() + + +def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.DeleteBackupScheduleRequest( + name="name_value", + ) + + +def test_delete_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_schedule + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_schedule + ] = mock_rpc + request = {} + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.DeleteBackupScheduleRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_schedule + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_schedule + ] = mock_object + + request = {} + await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async( + transport: str = "grpc_asyncio", + request_type=backup_schedule.DeleteBackupScheduleRequest, +): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup_schedule.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_from_dict(): + await test_delete_backup_schedule_async(request_type=dict) + + +def test_delete_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.DeleteBackupScheduleRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule), "__call__"
+ ) as call:
+ call.return_value = None
+ client.delete_backup_schedule(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_field_headers_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = backup_schedule.DeleteBackupScheduleRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_backup_schedule(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_delete_backup_schedule_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_backup_schedule(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_delete_backup_schedule_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_backup_schedule(
+ backup_schedule.DeleteBackupScheduleRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_backup_schedule(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
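+ # As in the sync variant, the flattened "name" argument should appear on
+ # the request message handed to the stub.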
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_schedule( + backup_schedule.DeleteBackupScheduleRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backup_schedule.ListBackupSchedulesRequest, + dict, + ], +) +def test_list_backup_schedules(request_type, transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backup_schedule.ListBackupSchedulesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.ListBackupSchedulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupSchedulesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_backup_schedules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_schedules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.ListBackupSchedulesRequest() + + +def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.ListBackupSchedulesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backup_schedules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.ListBackupSchedulesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_backup_schedules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_schedules + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_schedules + ] = mock_rpc + request = {} + client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_schedules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_schedules_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.ListBackupSchedulesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_backup_schedules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.ListBackupSchedulesRequest() + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_schedules + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_schedules + ] = mock_object + + request = {} + await client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. 
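+ # method_async.wrap_method ran only at client construction; the AsyncMock
+ # planted above absorbs both invocations without re-wrapping.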
+ assert mock_object.call_count == 1 + + await client.list_backup_schedules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async( + transport: str = "grpc_asyncio", + request_type=backup_schedule.ListBackupSchedulesRequest, +): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.ListBackupSchedulesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup_schedule.ListBackupSchedulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupSchedulesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_from_dict(): + await test_list_backup_schedules_async(request_type=dict) + + +def test_list_backup_schedules_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.ListBackupSchedulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + call.return_value = backup_schedule.ListBackupSchedulesResponse() + client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_schedules_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.ListBackupSchedulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backup_schedule.ListBackupSchedulesResponse() + ) + await client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. 
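+ # The assertions below also confirm the routing header derived from
+ # request.parent was attached to the outgoing metadata.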
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_list_backup_schedules_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backup_schedules), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = backup_schedule.ListBackupSchedulesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_backup_schedules(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+def test_list_backup_schedules_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_backup_schedules(
+ backup_schedule.ListBackupSchedulesRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backup_schedules), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ backup_schedule.ListBackupSchedulesResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_backup_schedules(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_backup_schedules(
+ backup_schedule.ListBackupSchedulesRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_backup_schedules_pager(transport_name: str = "grpc"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backup_schedules), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
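+ # The trailing RuntimeError is a sentinel: iteration should stop at the
+ # page with an empty next_page_token, so a fifth call would fail loudly.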
+ call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token="abc", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token="def", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token="ghi", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_schedules(request={}) + + assert pager._metadata == expected_metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup_schedule.BackupSchedule) for i in results) + + +def test_list_backup_schedules_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token="abc", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token="def", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token="ghi", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_schedules(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
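+ # Same page fixtures as the sync pager test; here they are consumed via
+ # "async for" on the async pager.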
+ call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token="abc", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token="def", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token="ghi", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_schedules( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup_schedule.BackupSchedule) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token="abc", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token="def", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token="ghi", + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_schedules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_database_admin.ListDatabasesRequest, + dict, + ], +) +def test_list_databases_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
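+ # For the REST transport the fake response is serialized to JSON below,
+ # mirroring what the server would put on the wire.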
+ return_value = spanner_database_admin.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_databases(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_databases_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + + request = {} + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_databases_rest_required_fields( + request_type=spanner_database_admin.ListDatabasesRequest, +): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. 
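+ # Session is the requests.Session class the REST transport drives, so
+ # patching its request method intercepts the whole HTTP round trip.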
+ with mock.patch.object(Session, "request") as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, "transcode") as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.list_databases(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_list_databases_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials
+ )
+
+ unset_fields = transport.list_databases._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "pageSize",
+ "pageToken",
+ )
+ )
+ & set(("parent",))
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_databases_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DatabaseAdminRestInterceptor, "post_list_databases"
+ ) as post, mock.patch.object(
+ transports.DatabaseAdminRestInterceptor, "pre_list_databases"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = spanner_database_admin.ListDatabasesRequest.pb(
+ spanner_database_admin.ListDatabasesRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = (
+ spanner_database_admin.ListDatabasesResponse.to_json(
+ spanner_database_admin.ListDatabasesResponse()
+ )
+ )
+
+ request = spanner_database_admin.ListDatabasesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = spanner_database_admin.ListDatabasesResponse()
+
+ client.list_databases(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_list_databases_rest_bad_request(
+ transport: str = "rest", request_type=spanner_database_admin.ListDatabasesRequest
+):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"parent": "projects/sample1/instances/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.list_databases(request)
+
+
+def test_list_databases_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner_database_admin.ListDatabasesResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"parent": "projects/sample1/instances/sample2"}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent="parent_value",
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ client.list_databases(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host,
+ args[1],
+ )
+
+
+def test_list_databases_rest_flattened_error(transport: str = "rest"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_databases(
+ spanner_database_admin.ListDatabasesRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_databases_rest_pager(transport: str = "rest"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token="abc", + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token="ghi", + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + spanner_database_admin.ListDatabasesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/instances/sample2"} + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner_database_admin.Database) for i in results) + + pages = list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_database_admin.CreateDatabaseRequest, + dict, + ], +) +def test_create_database_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_database(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_database_rest_required_fields( + request_type=spanner_database_admin.CreateDatabaseRequest, +): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["create_statement"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["createStatement"] = "create_statement_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "createStatement" in jsonified_request + assert jsonified_request["createStatement"] == "create_statement_value" + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "createStatement", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_database_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatabaseAdminRestInterceptor, "post_create_database" + ) as post, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "pre_create_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = spanner_database_admin.CreateDatabaseRequest.pb( + spanner_database_admin.CreateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = spanner_database_admin.CreateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_database_rest_bad_request( + transport: str = "rest", request_type=spanner_database_admin.CreateDatabaseRequest +): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
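+ # api_core maps an HTTP 400 response to core_exceptions.BadRequest, which
+ # pytest.raises captures here.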
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_database(request) + + +def test_create_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/instances/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + create_statement="create_statement_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, + args[1], + ) + + +def test_create_database_rest_flattened_error(transport: str = "rest"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + spanner_database_admin.CreateDatabaseRequest(), + parent="parent_value", + create_statement="create_statement_value", + ) + + +def test_create_database_rest_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_database_admin.GetDatabaseRequest, + dict, + ], +) +def test_get_database_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/instances/sample2/databases/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
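+ # Unlike create_database, get_database returns the Database resource
+ # directly rather than a long-running operation.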
+ return_value = spanner_database_admin.Database( + name="name_value", + state=spanner_database_admin.Database.State.CREATING, + version_retention_period="version_retention_period_value", + default_leader="default_leader_value", + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + enable_drop_protection=True, + reconciling=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_database(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.Database) + assert response.name == "name_value" + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == "version_retention_period_value" + assert response.default_leader == "default_leader_value" + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.enable_drop_protection is True + assert response.reconciling is True + + +def test_get_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_database] = mock_rpc + + request = {} + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. 
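+        # (call_count == 1 shows the request was routed through the cached
+        # mock; the second call below must not trigger a fresh wrap_method.)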
+ assert mock_rpc.call_count == 1 + + client.get_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_database_rest_required_fields( + request_type=spanner_database_admin.GetDatabaseRequest, +): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
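+            # (transcode() is stubbed with a minimal dict; for this body-less
+            # GET only "uri", "method" and "query_params" are populated.)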
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "post_get_database" + ) as post, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "pre_get_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = spanner_database_admin.GetDatabaseRequest.pb( + spanner_database_admin.GetDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = spanner_database_admin.Database.to_json( + spanner_database_admin.Database() + ) + + request = spanner_database_admin.GetDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.Database() + + client.get_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_database_rest_bad_request( + transport: str = "rest", request_type=spanner_database_admin.GetDatabaseRequest +): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/instances/sample2/databases/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_database(request) + + +def test_get_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/instances/sample2/databases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/instances/*/databases/*}" % client.transport._host, + args[1], + ) + + +def test_get_database_rest_flattened_error(transport: str = "rest"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
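+    # (Flattened arguments can only be merged into an empty request object,
+    # so supplying both at once is ambiguous and raises ValueError.)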
+ with pytest.raises(ValueError): + client.get_database( + spanner_database_admin.GetDatabaseRequest(), + name="name_value", + ) + + +def test_get_database_rest_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_database_admin.UpdateDatabaseRequest, + dict, + ], +) +def test_update_database_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} + } + request_init["database"] = { + "name": "projects/sample1/instances/sample2/databases/sample3", + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "restore_info": { + "source_type": 1, + "backup_info": { + "backup": "backup_value", + "version_time": {}, + "create_time": {}, + "source_database": "source_database_value", + }, + }, + "encryption_config": { + "kms_key_name": "kms_key_name_value", + "kms_key_names": ["kms_key_names_value1", "kms_key_names_value2"], + }, + "encryption_info": [ + { + "encryption_type": 1, + "encryption_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "kms_key_version": "kms_key_version_value", + } + ], + "version_retention_period": "version_retention_period_value", + "earliest_version_time": {}, + "default_leader": "default_leader_value", + "database_dialect": 1, + "enable_drop_protection": True, + "reconciling": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
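+        # (proto-plus message types expose their fields via `meta.fields`,
+        # while raw protobuf types expose them via `DESCRIPTOR.fields`; the
+        # check below picks whichever the installed dependency provides.)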
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_database(request) + + # Establish that the response is the type that we expect. 
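+    # (update_database is a long-running operation: the client returns an
+    # api_core Operation whose raw proto is reachable via `.operation`.)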
+ assert response.operation.name == "operations/spam" + + +def test_update_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + + request = {} + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_rest_required_fields( + request_type=spanner_database_admin.UpdateDatabaseRequest, +): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_database._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "database", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_database_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatabaseAdminRestInterceptor, "post_update_database" + ) as post, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "pre_update_database" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = spanner_database_admin.UpdateDatabaseRequest.pb( + spanner_database_admin.UpdateDatabaseRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = spanner_database_admin.UpdateDatabaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_database( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_database_rest_bad_request( + transport: str = "rest", request_type=spanner_database_admin.UpdateDatabaseRequest +): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_database(request) + + +def test_update_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + database=spanner_database_admin.Database(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{database.name=projects/*/instances/*/databases/*}" + % client.transport._host, + args[1], + ) + + +def test_update_database_rest_flattened_error(transport: str = "rest"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database( + spanner_database_admin.UpdateDatabaseRequest(), + database=spanner_database_admin.Database(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_database_rest_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_database_admin.UpdateDatabaseDdlRequest, + dict, + ], +) +def test_update_database_ddl_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_database_ddl(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_database_ddl_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_database_ddl in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_database_ddl + ] = mock_rpc + + request = {} + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_ddl_rest_required_fields( + request_type=spanner_database_admin.UpdateDatabaseDdlRequest, +): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request_init["statements"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = "database_value" + jsonified_request["statements"] = "statements_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == "database_value" + assert "statements" in jsonified_request + assert jsonified_request["statements"] == "statements_value" + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_database_ddl(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_database_ddl_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_database_ddl._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "database", + "statements", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_database_ddl_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatabaseAdminRestInterceptor, "post_update_database_ddl" + ) as post, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "pre_update_database_ddl" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = spanner_database_admin.UpdateDatabaseDdlRequest.pb( + spanner_database_admin.UpdateDatabaseDdlRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = spanner_database_admin.UpdateDatabaseDdlRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_database_ddl( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_database_ddl_rest_bad_request( + transport: str = "rest", + request_type=spanner_database_admin.UpdateDatabaseDdlRequest, +): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will 
satisfy transcoding + request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_database_ddl(request) + + +def test_update_database_ddl_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "database": "projects/sample1/instances/sample2/databases/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + database="database_value", + statements=["statements_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_database_ddl(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{database=projects/*/instances/*/databases/*}/ddl" + % client.transport._host, + args[1], + ) + + +def test_update_database_ddl_rest_flattened_error(transport: str = "rest"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database="database_value", + statements=["statements_value"], + ) + + +def test_update_database_ddl_rest_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_database_admin.DropDatabaseRequest, + dict, + ], +) +def test_drop_database_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
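+        # (DropDatabase returns google.protobuf.Empty, so the fake payload is
+        # an empty JSON string and the client is expected to return None.)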
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.drop_database(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_drop_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - & set(("parent",)) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.drop_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc + + request = {} + client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.drop_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_drop_database_rest_required_fields( + request_type=spanner_database_admin.DropDatabaseRequest, +): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).drop_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = "database_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).drop_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == "database_value" + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.drop_database(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_drop_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.drop_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database",))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): +def test_drop_database_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10058,14 +13847,11 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_list_databases" - ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_list_databases" + transports.DatabaseAdminRestInterceptor, "pre_drop_database" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = spanner_database_admin.ListDatabasesRequest.pb( - spanner_database_admin.ListDatabasesRequest() + pb_message = spanner_database_admin.DropDatabaseRequest.pb( + spanner_database_admin.DropDatabaseRequest() ) transcode.return_value = { "method": "post", @@ -10077,21 +13863,15 @@ def test_list_databases_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - spanner_database_admin.ListDatabasesResponse.to_json( - spanner_database_admin.ListDatabasesResponse() - ) - ) - request = spanner_database_admin.ListDatabasesRequest() + request = spanner_database_admin.DropDatabaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabasesResponse() - client.list_databases( + client.drop_database( request, metadata=[ ("key", "val"), @@ -10100,11 +13880,10 @@ def test_list_databases_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_databases_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.ListDatabasesRequest +def test_drop_database_rest_bad_request( + transport: str = "rest", request_type=spanner_database_admin.DropDatabaseRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10112,7 +13891,7 @@ def test_list_databases_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -10124,10 +13903,10 @@ def test_list_databases_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_databases(request) + client.drop_database(request) -def test_list_databases_rest_flattened(): +def test_drop_database_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10136,152 +13915,103 @@ def test_list_databases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = { + "database": "projects/sample1/instances/sample2/databases/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + database="database_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_databases(**mock_args) + client.drop_database(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, + "%s/v1/{database=projects/*/instances/*/databases/*}" + % client.transport._host, args[1], ) -def test_list_databases_rest_flattened_error(transport: str = "rest"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_databases( - spanner_database_admin.ListDatabasesRequest(), - parent="parent_value", - ) - - -def test_list_databases_rest_pager(transport: str = "rest"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token="abc", - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token="ghi", - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - spanner_database_admin.ListDatabasesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/instances/sample2"} - - pager = client.list_databases(request=sample_request) +def test_drop_database_rest_flattened_error(transport: str = "rest"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.Database) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.drop_database( + spanner_database_admin.DropDatabaseRequest(), + database="database_value", + ) - pages = list(client.list_databases(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_drop_database_rest_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.CreateDatabaseRequest, + spanner_database_admin.GetDatabaseDdlRequest, dict, ], ) -def test_create_database_rest(request_type): +def test_get_database_ddl_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = spanner_database_admin.GetDatabaseDdlResponse( + statements=["statements_value"], + proto_descriptors=b"proto_descriptors_blob", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_database(request) + response = client.get_database_ddl(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) + assert response.statements == ["statements_value"] + assert response.proto_descriptors == b"proto_descriptors_blob" -def test_create_database_rest_use_cached_wrapped_rpc(): +def test_get_database_ddl_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10295,40 +14025,37 @@ def test_create_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods + assert client._transport.get_database_ddl in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_database_ddl + ] = mock_rpc request = {} - client.create_database(request) + client.get_database_ddl(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) + client.get_database_ddl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_database_rest_required_fields( - request_type=spanner_database_admin.CreateDatabaseRequest, +def test_get_database_ddl_rest_required_fields( + request_type=spanner_database_admin.GetDatabaseDdlRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["create_statement"] = "" + request_init["database"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10339,24 +14066,21 @@ def test_create_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_database._get_unset_required_fields(jsonified_request) + ).get_database_ddl._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["createStatement"] = "create_statement_value" + jsonified_request["database"] = "database_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_database._get_unset_required_fields(jsonified_request) + ).get_database_ddl._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "createStatement" in jsonified_request - assert jsonified_request["createStatement"] == "create_statement_value" + assert "database" in jsonified_request + assert jsonified_request["database"] == "database_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10365,7 +14089,7 @@ def test_create_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = spanner_database_admin.GetDatabaseDdlResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10377,45 +14101,41 @@ def test_create_database_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_database(request) + response = client.get_database_ddl(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_database_rest_unset_required_fields(): +def test_get_database_ddl_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "createStatement", - ) - ) - ) + unset_fields = transport.get_database_ddl._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_database_rest_interceptors(null_interceptor): +def test_get_database_ddl_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10428,16 +14148,14 @@ def test_create_database_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_create_database" + transports.DatabaseAdminRestInterceptor, "post_get_database_ddl" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_create_database" + transports.DatabaseAdminRestInterceptor, "pre_get_database_ddl" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.CreateDatabaseRequest.pb( - spanner_database_admin.CreateDatabaseRequest() + pb_message = spanner_database_admin.GetDatabaseDdlRequest.pb( + spanner_database_admin.GetDatabaseDdlRequest() ) transcode.return_value = { "method": "post", @@ -10449,19 +14167,21 @@ def test_create_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + spanner_database_admin.GetDatabaseDdlResponse.to_json( + spanner_database_admin.GetDatabaseDdlResponse() + ) ) - request = spanner_database_admin.CreateDatabaseRequest() + request = spanner_database_admin.GetDatabaseDdlRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = spanner_database_admin.GetDatabaseDdlResponse() - client.create_database( + client.get_database_ddl( request, metadata=[ ("key", 
"val"), @@ -10473,8 +14193,8 @@ def test_create_database_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_database_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.CreateDatabaseRequest +def test_get_database_ddl_rest_bad_request( + transport: str = "rest", request_type=spanner_database_admin.GetDatabaseDdlRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10482,7 +14202,7 @@ def test_create_database_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10494,10 +14214,10 @@ def test_create_database_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_database(request) + client.get_database_ddl(request) -def test_create_database_rest_flattened(): +def test_get_database_ddl_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10506,38 +14226,42 @@ def test_create_database_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = spanner_database_admin.GetDatabaseDdlResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = { + "database": "projects/sample1/instances/sample2/databases/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - create_statement="create_statement_value", + database="database_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_database(**mock_args) + client.get_database_ddl(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, + "%s/v1/{database=projects/*/instances/*/databases/*}/ddl" + % client.transport._host, args[1], ) -def test_create_database_rest_flattened_error(transport: str = "rest"): +def test_get_database_ddl_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10546,14 +14270,13 @@ def test_create_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent="parent_value", - create_statement="create_statement_value", + client.get_database_ddl( + spanner_database_admin.GetDatabaseDdlRequest(), + database="database_value", ) -def test_create_database_rest_error(): +def test_get_database_ddl_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10562,56 +14285,44 @@ def test_create_database_rest_error(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.GetDatabaseRequest, + iam_policy_pb2.SetIamPolicyRequest, dict, ], ) -def test_get_database_rest(request_type): +def test_set_iam_policy_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database( - name="name_value", - state=spanner_database_admin.Database.State.CREATING, - version_retention_period="version_retention_period_value", - default_leader="default_leader_value", - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_database(request) + response = client.set_iam_policy(request) # Establish that the response is the type that we expect. 
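# --- Illustrative sketch (editor's note, not part of the patch) ---
# How the fake REST responses above are assembled, using only stock
# protobuf/requests APIs: serialize the payload with json_format and attach
# the bytes to a bare requests.Response.
from google.iam.v1 import policy_pb2
from google.protobuf import json_format
from requests import Response

payload = policy_pb2.Policy(version=774, etag=b"etag_blob")
fake = Response()
fake.status_code = 200
fake._content = json_format.MessageToJson(payload).encode("UTF-8")
# The client later parses fake.content back into a Policy, which is why the
# test can assert on response.version and response.etag.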
- assert isinstance(response, spanner_database_admin.Database) - assert response.name == "name_value" - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == "version_retention_period_value" - assert response.default_leader == "default_leader_value" - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" -def test_get_database_rest_use_cached_wrapped_rpc(): +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10625,37 +14336,37 @@ def test_get_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods + assert client._transport.set_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc request = {} - client.get_database(request) + client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_database(request) + client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_database_rest_required_fields( - request_type=spanner_database_admin.GetDatabaseRequest, +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["resource"] = "" request = request_type(**request_init) - pb_request = request_type.pb(request) + pb_request = request jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -10664,21 +14375,21 @@ def test_get_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_database._get_unset_required_fields(jsonified_request) + ).set_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["resource"] = "resource_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_database._get_unset_required_fields(jsonified_request) + ).set_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10687,7 +14398,7 @@ def 
test_get_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database() + return_value = policy_pb2.Policy() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10696,42 +14407,49 @@ def test_get_database_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - pb_request = request_type.pb(request) + pb_request = request transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_database(request) + response = client.set_iam_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_database_rest_unset_required_fields(): +def test_set_iam_policy_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): +def test_set_iam_policy_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10744,15 +14462,13 @@ def test_get_database_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_get_database" + transports.DatabaseAdminRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_get_database" + transports.DatabaseAdminRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.GetDatabaseRequest.pb( - spanner_database_admin.GetDatabaseRequest() - ) + pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10763,19 +14479,17 @@ def test_get_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = spanner_database_admin.Database.to_json( - spanner_database_admin.Database() - ) + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) - request = spanner_database_admin.GetDatabaseRequest() + request = iam_policy_pb2.SetIamPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", 
"squid"), ] pre.return_value = request, metadata - post.return_value = spanner_database_admin.Database() + post.return_value = policy_pb2.Policy() - client.get_database( + client.set_iam_policy( request, metadata=[ ("key", "val"), @@ -10787,8 +14501,8 @@ def test_get_database_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_database_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.GetDatabaseRequest +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10796,7 +14510,7 @@ def test_get_database_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10808,10 +14522,10 @@ def test_get_database_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_database(request) + client.set_iam_policy(request) -def test_get_database_rest_flattened(): +def test_set_iam_policy_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10820,41 +14534,40 @@ def test_get_database_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database() + return_value = policy_pb2.Policy() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/instances/sample2/databases/sample3" + "resource": "projects/sample1/instances/sample2/databases/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + resource="resource_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_database(**mock_args) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/instances/*/databases/*}" % client.transport._host, + "%s/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy" + % client.transport._host, args[1], ) -def test_get_database_rest_flattened_error(transport: str = "rest"): +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10863,13 +14576,13 @@ def test_get_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name="name_value", + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", ) -def test_get_database_rest_error(): +def test_set_iam_policy_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10878,133 +14591,27 @@ def test_get_database_rest_error(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.UpdateDatabaseRequest, + iam_policy_pb2.GetIamPolicyRequest, dict, ], ) -def test_update_database_rest(request_type): +def test_get_iam_policy_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} - } - request_init["database"] = { - "name": "projects/sample1/instances/sample2/databases/sample3", - "state": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "restore_info": { - "source_type": 1, - "backup_info": { - "backup": "backup_value", - "version_time": {}, - "create_time": {}, - "source_database": "source_database_value", - }, - }, - "encryption_config": { - "kms_key_name": "kms_key_name_value", - "kms_key_names": ["kms_key_names_value1", "kms_key_names_value2"], - }, - "encryption_info": [ - { - "encryption_type": 1, - "encryption_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "kms_key_version": "kms_key_version_value", - } - ], - "version_retention_period": "version_retention_period_value", - "earliest_version_time": {}, - "default_leader": "default_leader_value", - "database_dialect": 1, - "enable_drop_protection": True, - "reconciling": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] + request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) # Wrap the value into a proper Response obj response_value = Response() @@ -11013,13 +14620,15 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_database(request) + response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
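# --- Illustrative sketch (editor's note, not part of the patch) ---
# The dependency-skew pruning deleted above (and re-added for create_backup
# later in this patch) boils down to: enumerate the fields the runtime
# message type knows, then drop sample-request keys it does not. Minimal
# version, assuming only that proto-plus types expose .meta.fields and raw
# protobuf types expose .DESCRIPTOR:
def known_field_names(message_cls):
    if hasattr(message_cls, "DESCRIPTOR"):              # raw protobuf class
        return {f.name for f in message_cls.DESCRIPTOR.fields}
    return set(message_cls.meta.fields)                 # proto-plus wrapper

def prune_sample(sample, message_cls):
    allowed = known_field_names(message_cls)
    return {k: v for k, v in sample.items() if k in allowed}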
- assert response.operation.name == "operations/spam" + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" -def test_update_database_rest_use_cached_wrapped_rpc(): +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11033,40 +14642,37 @@ def test_update_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods + assert client._transport.get_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc request = {} - client.update_database(request) + client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) + client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_database_rest_required_fields( - request_type=spanner_database_admin.UpdateDatabaseRequest, +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} + request_init["resource"] = "" request = request_type(**request_init) - pb_request = request_type.pb(request) + pb_request = request jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -11075,19 +14681,21 @@ def test_update_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_database._get_unset_required_fields(jsonified_request) + ).get_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["resource"] = "resource_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11096,7 +14704,7 @@ def test_update_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = policy_pb2.Policy() # Mock the http request call within the method and fake a response. 
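# --- Illustrative sketch (editor's note, not part of the patch) ---
# The transport-level mocking pattern used throughout these tests: patch
# requests.Session.request so no HTTP leaves the process, then inspect
# req.call_args for what the transport would have sent.
from unittest import mock
from requests import Response, Session

with mock.patch.object(Session, "request") as req:
    canned = Response()
    canned.status_code = 200
    canned._content = b"{}"
    req.return_value = canned
    # Any client built on Session now receives `canned`, and
    # req.call_args.kwargs["params"] exposes the query params it sent.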
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11105,10 +14713,10 @@ def test_update_database_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - pb_request = request_type.pb(request) + pb_request = request transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -11116,37 +14724,30 @@ def test_update_database_rest_required_fields( response_value = Response() response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_database(request) + response = client.get_iam_policy(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_database_rest_unset_required_fields(): +def test_get_iam_policy_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "database", - "updateMask", - ) - ) - ) + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_database_rest_interceptors(null_interceptor): +def test_get_iam_policy_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11159,17 +14760,13 @@ def test_update_database_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_update_database" + transports.DatabaseAdminRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_update_database" + transports.DatabaseAdminRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.UpdateDatabaseRequest.pb( - spanner_database_admin.UpdateDatabaseRequest() - ) + pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11180,19 +14777,17 @@ def test_update_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) - request = spanner_database_admin.UpdateDatabaseRequest() + request = iam_policy_pb2.GetIamPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = policy_pb2.Policy() - client.update_database( + client.get_iam_policy( request, metadata=[ ("key", "val"), @@ -11204,8 +14799,8 @@ def 
test_update_database_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_database_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.UpdateDatabaseRequest +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11213,9 +14808,7 @@ def test_update_database_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} - } + request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11227,10 +14820,10 @@ def test_update_database_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_database(request) + client.get_iam_policy(request) -def test_update_database_rest_flattened(): +def test_get_iam_policy_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11239,17 +14832,16 @@ def test_update_database_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = policy_pb2.Policy() # get arguments that satisfy an http rule for this method sample_request = { - "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} + "resource": "projects/sample1/instances/sample2/databases/sample3" } # get truthy value for each flattened field mock_args = dict( - database=spanner_database_admin.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + resource="resource_value", ) mock_args.update(sample_request) @@ -11260,20 +14852,20 @@ def test_update_database_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_database(**mock_args) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{database.name=projects/*/instances/*/databases/*}" + "%s/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy" % client.transport._host, args[1], ) -def test_update_database_rest_flattened_error(transport: str = "rest"): +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11282,14 +14874,13 @@ def test_update_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_database( - spanner_database_admin.UpdateDatabaseRequest(), - database=spanner_database_admin.Database(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", ) -def test_update_database_rest_error(): +def test_get_iam_policy_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11298,24 +14889,26 @@ def test_update_database_rest_error(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.UpdateDatabaseDdlRequest, + iam_policy_pb2.TestIamPermissionsRequest, dict, ], ) -def test_update_database_ddl_rest(request_type): +def test_test_iam_permissions_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) # Wrap the value into a proper Response obj response_value = Response() @@ -11324,13 +14917,14 @@ def test_update_database_ddl_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_database_ddl(request) + response = client.test_iam_permissions(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] -def test_update_database_ddl_rest_use_cached_wrapped_rpc(): +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11345,7 +14939,7 @@ def test_update_database_ddl_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_database_ddl in client._transport._wrapped_methods + client._transport.test_iam_permissions in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -11354,36 +14948,32 @@ def test_update_database_ddl_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_database_ddl + client._transport.test_iam_permissions ] = mock_rpc request = {} - client.update_database_ddl(request) + client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
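# --- Illustrative sketch (editor's note, not part of the patch) ---
# The caching contract asserted in the *_use_cached_wrapped_rpc tests:
# wrap_method runs once per transport method at construction time, and later
# calls reuse the cached wrapper, so wrapper_fn.call_count stays 0 once the
# cache is primed. A minimal analogue, with hypothetical names:
def wrap_method(fn):
    def wrapped(*args, **kwargs):     # stand-in for retry/timeout wrapping
        return fn(*args, **kwargs)
    return wrapped

class SketchTransport:
    def __init__(self, methods):
        # analogue of _prep_wrapped_messages: wrap everything once, up front
        self._wrapped_methods = {m: wrap_method(m) for m in methods}

    def call(self, method, request):
        return self._wrapped_methods[method](request)   # no re-wrapping here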
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database_ddl(request) + client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_database_ddl_rest_required_fields( - request_type=spanner_database_admin.UpdateDatabaseDdlRequest, +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["database"] = "" - request_init["statements"] = "" + request_init["resource"] = "" + request_init["permissions"] = "" request = request_type(**request_init) - pb_request = request_type.pb(request) + pb_request = request jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -11392,24 +14982,24 @@ def test_update_database_ddl_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_database_ddl._get_unset_required_fields(jsonified_request) + ).test_iam_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["database"] = "database_value" - jsonified_request["statements"] = "statements_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_database_ddl._get_unset_required_fields(jsonified_request) + ).test_iam_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" - assert "statements" in jsonified_request - assert jsonified_request["statements"] == "statements_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11418,7 +15008,7 @@ def test_update_database_ddl_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = iam_policy_pb2.TestIamPermissionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11427,10 +15017,10 @@ def test_update_database_ddl_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
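# --- Illustrative sketch (editor's note, not part of the patch) ---
# Shape of the object transcode() is mocked to return in these tests: the
# REST layer needs only a uri, an HTTP method, the query params, and (for
# POST/PATCH verbs) a body. Leaving the body out forces every field into
# query_params, which is what the required-fields assertions rely on.
def fake_transcode(pb_request, method="post", with_body=True):
    result = {
        "uri": "v1/sample_method",
        "method": method,
        "query_params": pb_request,
    }
    if with_body:
        result["body"] = pb_request
    return result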
- pb_request = request_type.pb(request) + pb_request = request transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -11438,37 +15028,38 @@ def test_update_database_ddl_rest_required_fields( response_value = Response() response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_database_ddl(request) + response = client.test_iam_permissions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_database_ddl_rest_unset_required_fields(): +def test_test_iam_permissions_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_database_ddl._get_unset_required_fields({}) + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( - "database", - "statements", + "resource", + "permissions", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_database_ddl_rest_interceptors(null_interceptor): +def test_test_iam_permissions_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11481,17 +15072,13 @@ def test_update_database_ddl_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_update_database_ddl" + transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_update_database_ddl" + transports.DatabaseAdminRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.UpdateDatabaseDdlRequest.pb( - spanner_database_admin.UpdateDatabaseDdlRequest() - ) + pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11503,18 +15090,18 @@ def test_update_database_ddl_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + iam_policy_pb2.TestIamPermissionsResponse() ) - request = spanner_database_admin.UpdateDatabaseDdlRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.update_database_ddl( + client.test_iam_permissions( request, metadata=[ ("key", "val"), @@ -11526,9 +15113,8 @@ def test_update_database_ddl_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_database_ddl_rest_bad_request( - transport: str = "rest", - request_type=spanner_database_admin.UpdateDatabaseDdlRequest, +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest ): client 
= DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11536,7 +15122,7 @@ def test_update_database_ddl_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11548,10 +15134,10 @@ def test_update_database_ddl_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_database_ddl(request) + client.test_iam_permissions(request) -def test_update_database_ddl_rest_flattened(): +def test_test_iam_permissions_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11560,17 +15146,17 @@ def test_update_database_ddl_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = iam_policy_pb2.TestIamPermissionsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "database": "projects/sample1/instances/sample2/databases/sample3" + "resource": "projects/sample1/instances/sample2/databases/sample3" } # get truthy value for each flattened field mock_args = dict( - database="database_value", - statements=["statements_value"], + resource="resource_value", + permissions=["permissions_value"], ) mock_args.update(sample_request) @@ -11581,20 +15167,20 @@ def test_update_database_ddl_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_database_ddl(**mock_args) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{database=projects/*/instances/*/databases/*}/ddl" + "%s/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions" % client.transport._host, args[1], ) -def test_update_database_ddl_rest_flattened_error(transport: str = "rest"): +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11603,14 +15189,14 @@ def test_update_database_ddl_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database="database_value", - statements=["statements_value"], + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], ) -def test_update_database_ddl_rest_error(): +def test_test_iam_permissions_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11619,39 +15205,141 @@ def test_update_database_ddl_rest_error(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.DropDatabaseRequest, + gsad_backup.CreateBackupRequest, dict, ], ) -def test_drop_database_rest(request_type): +def test_create_backup_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request_init["backup"] = { + "database": "database_value", + "version_time": {"seconds": 751, "nanos": 543}, + "expire_time": {}, + "name": "name_value", + "create_time": {}, + "size_bytes": 1089, + "state": 1, + "referencing_databases": [ + "referencing_databases_value1", + "referencing_databases_value2", + ], + "encryption_info": { + "encryption_type": 1, + "encryption_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "kms_key_version": "kms_key_version_value", + }, + "encryption_information": {}, + "database_dialect": 1, + "referencing_backups": [ + "referencing_backups_value1", + "referencing_backups_value2", + ], + "max_expire_time": {}, + "backup_schedules": ["backup_schedules_value1", "backup_schedules_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.drop_database(request) + response = client.create_backup(request) # Establish that the response is the type that we expect. 
- assert response is None + assert response.operation.name == "operations/spam" -def test_drop_database_rest_use_cached_wrapped_rpc(): +def test_create_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11665,35 +15353,40 @@ def test_drop_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.drop_database in client._transport._wrapped_methods + assert client._transport.create_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc request = {} - client.drop_database(request) + client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.drop_database(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_drop_database_rest_required_fields( - request_type=spanner_database_admin.DropDatabaseRequest, +def test_create_backup_rest_required_fields( + request_type=gsad_backup.CreateBackupRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["database"] = "" + request_init["parent"] = "" + request_init["backup_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11701,24 +15394,37 @@ def test_drop_database_rest_required_fields( ) # verify fields with default values are dropped + assert "backupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).drop_database._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] - jsonified_request["database"] = "database_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).drop_database._get_unset_required_fields(jsonified_request) + ).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
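# --- Illustrative sketch (editor's note, not part of the patch) ---
# Reading the set algebra in the *_unset_required_fields assertions nearby:
# the expected value is the intersection of "required fields that carry
# default values" with "all required fields". Worked with create_backup's
# fields as they appear below:
fields_with_defaults = {"backupId", "encryptionConfig"}
required_fields = {"parent", "backupId", "backup"}
assert (fields_with_defaults & required_fields) == {"backupId"}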
+ assert not set(unset_fields) - set( + ( + "backup_id", + "encryption_config", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11727,7 +15433,7 @@ def test_drop_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11739,36 +15445,57 @@ def test_drop_database_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.drop_database(request) + response = client.create_backup(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_drop_database_rest_unset_required_fields(): +def test_create_backup_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.drop_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupId", + "encryptionConfig", + ) + ) + & set( + ( + "parent", + "backupId", + "backup", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_drop_database_rest_interceptors(null_interceptor): +def test_create_backup_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11781,11 +15508,16 @@ def test_drop_database_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_drop_database" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatabaseAdminRestInterceptor, "post_create_backup" + ) as post, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() - pb_message = spanner_database_admin.DropDatabaseRequest.pb( - spanner_database_admin.DropDatabaseRequest() + post.assert_not_called() + pb_message = gsad_backup.CreateBackupRequest.pb( + gsad_backup.CreateBackupRequest() ) transcode.return_value = { "method": "post", @@ -11797,15 +15529,19 @@ def 
test_drop_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = spanner_database_admin.DropDatabaseRequest() + request = gsad_backup.CreateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.drop_database( + client.create_backup( request, metadata=[ ("key", "val"), @@ -11814,10 +15550,11 @@ def test_drop_database_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_drop_database_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.DropDatabaseRequest +def test_create_backup_rest_bad_request( + transport: str = "rest", request_type=gsad_backup.CreateBackupRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11825,7 +15562,7 @@ def test_drop_database_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11837,10 +15574,10 @@ def test_drop_database_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.drop_database(request) + client.create_backup(request) -def test_drop_database_rest_flattened(): +def test_create_backup_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11849,40 +15586,39 @@ def test_drop_database_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "database": "projects/sample1/instances/sample2/databases/sample3" - } + sample_request = {"parent": "projects/sample1/instances/sample2"} # get truthy value for each flattened field mock_args = dict( - database="database_value", + parent="parent_value", + backup=gsad_backup.Backup(database="database_value"), + backup_id="backup_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.drop_database(**mock_args) + client.create_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
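# --- Illustrative sketch (editor's note, not part of the patch) ---
# The URI assertion just below uses google.api_core.path_template: a
# concrete request path validates against the http-rule template, with *
# matching a single path segment. Host and resource ids here are
# illustrative values, not ones the tests use.
from google.api_core import path_template

assert path_template.validate(
    "example.com/v1/{parent=projects/*/instances/*}/backups",
    "example.com/v1/projects/sample1/instances/sample2/backups",
)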
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{database=projects/*/instances/*/databases/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1], ) -def test_drop_database_rest_flattened_error(transport: str = "rest"): +def test_create_backup_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11891,13 +15627,15 @@ def test_drop_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.drop_database( - spanner_database_admin.DropDatabaseRequest(), - database="database_value", + client.create_backup( + gsad_backup.CreateBackupRequest(), + parent="parent_value", + backup=gsad_backup.Backup(database="database_value"), + backup_id="backup_id_value", ) -def test_drop_database_rest_error(): +def test_create_backup_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11906,46 +15644,39 @@ def test_drop_database_rest_error(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.GetDatabaseDdlRequest, + backup.CopyBackupRequest, dict, ], ) -def test_get_database_ddl_rest(request_type): +def test_copy_backup_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse( - statements=["statements_value"], - proto_descriptors=b"proto_descriptors_blob", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_database_ddl(request) + response = client.copy_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ["statements_value"] - assert response.proto_descriptors == b"proto_descriptors_blob" + assert response.operation.name == "operations/spam" -def test_get_database_ddl_rest_use_cached_wrapped_rpc(): +def test_copy_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11959,37 +15690,39 @@ def test_get_database_ddl_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_database_ddl in client._transport._wrapped_methods + assert client._transport.copy_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_database_ddl - ] = mock_rpc + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc request = {} - client.get_database_ddl(request) + client.copy_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_database_ddl(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_database_ddl_rest_required_fields( - request_type=spanner_database_admin.GetDatabaseDdlRequest, -): +def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["database"] = "" + request_init["parent"] = "" + request_init["backup_id"] = "" + request_init["source_backup"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12000,21 +15733,27 @@ def test_get_database_ddl_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_database_ddl._get_unset_required_fields(jsonified_request) + ).copy_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["database"] = "database_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["backupId"] = "backup_id_value" + jsonified_request["sourceBackup"] = "source_backup_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_database_ddl._get_unset_required_fields(jsonified_request) + ).copy_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == "backup_id_value" + assert "sourceBackup" in jsonified_request + assert jsonified_request["sourceBackup"] == "source_backup_value" client = 
DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12023,7 +15762,7 @@ def test_get_database_ddl_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12035,41 +15774,47 @@ def test_get_database_ddl_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_database_ddl(request) + response = client.copy_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_database_ddl_rest_unset_required_fields(): +def test_copy_backup_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_database_ddl._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) + unset_fields = transport.copy_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "backupId", + "sourceBackup", + "expireTime", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_ddl_rest_interceptors(null_interceptor): +def test_copy_backup_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12082,15 +15827,15 @@ def test_get_database_ddl_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_get_database_ddl" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatabaseAdminRestInterceptor, "post_copy_backup" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_get_database_ddl" + transports.DatabaseAdminRestInterceptor, "pre_copy_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.GetDatabaseDdlRequest.pb( - spanner_database_admin.GetDatabaseDdlRequest() - ) + pb_message = backup.CopyBackupRequest.pb(backup.CopyBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12101,21 +15846,19 @@ def test_get_database_ddl_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - spanner_database_admin.GetDatabaseDdlResponse.to_json( - spanner_database_admin.GetDatabaseDdlResponse() - ) + req.return_value._content = json_format.MessageToJson( + 
operations_pb2.Operation() ) - request = spanner_database_admin.GetDatabaseDdlRequest() + request = backup.CopyBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = spanner_database_admin.GetDatabaseDdlResponse() + post.return_value = operations_pb2.Operation() - client.get_database_ddl( + client.copy_backup( request, metadata=[ ("key", "val"), @@ -12127,8 +15870,8 @@ def test_get_database_ddl_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_database_ddl_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.GetDatabaseDdlRequest +def test_copy_backup_rest_bad_request( + transport: str = "rest", request_type=backup.CopyBackupRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12136,7 +15879,7 @@ def test_get_database_ddl_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12148,10 +15891,10 @@ def test_get_database_ddl_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_database_ddl(request) + client.copy_backup(request) -def test_get_database_ddl_rest_flattened(): +def test_copy_backup_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12160,42 +15903,41 @@ def test_get_database_ddl_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "database": "projects/sample1/instances/sample2/databases/sample3" - } + sample_request = {"parent": "projects/sample1/instances/sample2"} # get truthy value for each flattened field mock_args = dict( - database="database_value", + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + expire_time=timestamp_pb2.Timestamp(seconds=751), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_database_ddl(**mock_args) + client.copy_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
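        # The HTTP rule for CopyBackup uses the custom-verb suffix ":copy" on
        # the backups collection, which the template check below verifies.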
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{database=projects/*/instances/*/databases/*}/ddl" + "%s/v1/{parent=projects/*/instances/*}/backups:copy" % client.transport._host, args[1], ) -def test_get_database_ddl_rest_flattened_error(transport: str = "rest"): +def test_copy_backup_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12204,13 +15946,16 @@ def test_get_database_ddl_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database="database_value", + client.copy_backup( + backup.CopyBackupRequest(), + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + expire_time=timestamp_pb2.Timestamp(seconds=751), ) -def test_get_database_ddl_rest_error(): +def test_copy_backup_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -12219,44 +15964,58 @@ def test_get_database_ddl_rest_error(): @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + backup.GetBackupRequest, dict, ], ) -def test_set_iam_policy_rest(request_type): +def test_get_backup_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + return_value = backup.Backup( + database="database_value", + name="name_value", + size_bytes=1089, + state=backup.Backup.State.CREATING, + referencing_databases=["referencing_databases_value"], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.get_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, backup.Backup) + assert response.database == "database_value" + assert response.name == "name_value" + assert response.size_bytes == 1089 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ["referencing_databases_value"] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ["referencing_backups_value"] + assert response.backup_schedules == ["backup_schedules_value"] -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): +def test_get_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12270,37 +16029,35 @@ def test_set_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods + assert client._transport.get_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc request = {} - client.set_iam_policy(request) + client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_iam_policy(request) + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.SetIamPolicyRequest, -): +def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["resource"] = "" + request_init["name"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -12309,21 +16066,21 @@ def test_set_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_iam_policy._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12332,7 +16089,7 @@ def 
test_set_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = backup.Backup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12341,49 +16098,42 @@ def test_set_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) + response = client.get_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_iam_policy_rest_unset_required_fields(): +def test_get_backup_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "resource", - "policy", - ) - ) - ) + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): +def test_get_backup_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12396,13 +16146,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_set_iam_policy" + transports.DatabaseAdminRestInterceptor, "post_get_backup" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_set_iam_policy" + transports.DatabaseAdminRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() + pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12413,17 +16163,17 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value._content = backup.Backup.to_json(backup.Backup()) - request = iam_policy_pb2.SetIamPolicyRequest() + request = backup.GetBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() + post.return_value = backup.Backup() - 
client.set_iam_policy( + client.get_backup( request, metadata=[ ("key", "val"), @@ -12435,8 +16185,8 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_set_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=backup.GetBackupRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12444,7 +16194,7 @@ def test_set_iam_policy_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12456,10 +16206,10 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.set_iam_policy(request) + client.get_backup(request) -def test_set_iam_policy_rest_flattened(): +def test_get_backup_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12468,101 +16218,218 @@ def test_set_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = backup.Backup() # get arguments that satisfy an http rule for this method - sample_request = { - "resource": "projects/sample1/instances/sample2/databases/sample3" - } + sample_request = {"name": "projects/sample1/instances/sample2/backups/sample3"} # get truthy value for each flattened field mock_args = dict( - resource="resource_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.set_iam_policy(**mock_args) + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
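+    # GAPIC clients treat the request object and flattened keyword fields
+    # as mutually exclusive ways of building the same request, so passing
+    # both must raise ValueError.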
+ with pytest.raises(ValueError): + client.get_backup( + backup.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsad_backup.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} + } + request_init["backup"] = { + "database": "database_value", + "version_time": {"seconds": 751, "nanos": 543}, + "expire_time": {}, + "name": "projects/sample1/instances/sample2/backups/sample3", + "create_time": {}, + "size_bytes": 1089, + "state": 1, + "referencing_databases": [ + "referencing_databases_value1", + "referencing_databases_value2", + ], + "encryption_info": { + "encryption_type": 1, + "encryption_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "kms_key_version": "kms_key_version_value", + }, + "encryption_information": {}, + "database_dialect": 1, + "referencing_backups": [ + "referencing_backups_value1", + "referencing_backups_value2", + ], + "max_expire_time": {}, + "backup_schedules": ["backup_schedules_value1", "backup_schedules_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy" - % client.transport._host, - args[1], - ) + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] -def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", - ) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_set_iam_policy_rest_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.GetIamPolicyRequest, - dict, - ], -) -def test_get_iam_policy_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + return_value = gsad_backup.Backup( + database="database_value", + name="name_value", + size_bytes=1089, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=["referencing_databases_value"], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=["referencing_backups_value"], + backup_schedules=["backup_schedules_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_iam_policy(request) + response = client.update_backup(request) # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, gsad_backup.Backup) + assert response.database == "database_value" + assert response.name == "name_value" + assert response.size_bytes == 1089 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ["referencing_databases_value"] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ["referencing_backups_value"] + assert response.backup_schedules == ["backup_schedules_value"] -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): +def test_update_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12576,37 +16443,36 @@ def test_get_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods + assert client._transport.update_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc request = {} - client.get_iam_policy(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. 
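        # The first update_backup() call stores the wrapped RPC in
        # client._transport._wrapped_methods; the second call below must hit
        # that cache, so wrap_method is never invoked again
        # (wrapper_fn.call_count stays 0) while the stub itself is called twice.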
assert mock_rpc.call_count == 1 - client.get_iam_policy(request) + client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_iam_policy_rest_required_fields( - request_type=iam_policy_pb2.GetIamPolicyRequest, +def test_update_backup_rest_required_fields( + request_type=gsad_backup.UpdateBackupRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["resource"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -12615,21 +16481,19 @@ def test_get_iam_policy_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_iam_policy._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12638,7 +16502,7 @@ def test_get_iam_policy_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = gsad_backup.Backup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12647,10 +16511,10 @@ def test_get_iam_policy_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
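                # transcode() normally maps the proto request onto an HTTP
                # verb, URI, query string and body according to the method's
                # http rule; stubbing it with a fixed "v1/sample_method" result
                # keeps the test focused on how required fields are serialized.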
- pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -12659,29 +16523,39 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_iam_policy(request) + response = client.update_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_iam_policy_rest_unset_required_fields(): +def test_update_backup_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource",))) + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "backup", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): +def test_update_backup_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12694,13 +16568,15 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_get_iam_policy" + transports.DatabaseAdminRestInterceptor, "post_update_backup" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_get_iam_policy" + transports.DatabaseAdminRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() + pb_message = gsad_backup.UpdateBackupRequest.pb( + gsad_backup.UpdateBackupRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12711,17 +16587,17 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value._content = gsad_backup.Backup.to_json(gsad_backup.Backup()) - request = iam_policy_pb2.GetIamPolicyRequest() + request = gsad_backup.UpdateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() + post.return_value = gsad_backup.Backup() - client.get_iam_policy( + client.update_backup( request, metadata=[ ("key", "val"), @@ -12733,8 +16609,8 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=gsad_backup.UpdateBackupRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12742,7 +16618,9 @@ def 
test_get_iam_policy_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} + request_init = { + "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12754,10 +16632,10 @@ def test_get_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_iam_policy(request) + client.update_backup(request) -def test_get_iam_policy_rest_flattened(): +def test_update_backup_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12766,40 +16644,43 @@ def test_get_iam_policy_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() + return_value = gsad_backup.Backup() # get arguments that satisfy an http rule for this method sample_request = { - "resource": "projects/sample1/instances/sample2/databases/sample3" + "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} } # get truthy value for each flattened field mock_args = dict( - resource="resource_value", + backup=gsad_backup.Backup(database="database_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_iam_policy(**mock_args) + client.update_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy" + "%s/v1/{backup.name=projects/*/instances/*/backups/*}" % client.transport._host, args[1], ) -def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): +def test_update_backup_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12808,13 +16689,14 @@ def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + client.update_backup( + gsad_backup.UpdateBackupRequest(), + backup=gsad_backup.Backup(database="database_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_iam_policy_rest_error(): +def test_update_backup_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -12823,42 +16705,39 @@ def test_get_iam_policy_rest_error(): @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, + backup.DeleteBackupRequest, dict, ], ) -def test_test_iam_permissions_rest(request_type): +def test_delete_backup_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"resource": "projects/sample1/instances/sample2/databases/sample3"} + request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.test_iam_permissions(request) + response = client.delete_backup(request) # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + assert response is None -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): +def test_delete_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12872,42 +16751,35 @@ def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.test_iam_permissions in client._transport._wrapped_methods - ) + assert client._transport.delete_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.test_iam_permissions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc request = {} - client.test_iam_permissions(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.test_iam_permissions(request) + client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_test_iam_permissions_rest_required_fields( - request_type=iam_policy_pb2.TestIamPermissionsRequest, -): +def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" + request_init["name"] = "" request = request_type(**request_init) - pb_request = request + pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) @@ -12916,24 +16788,21 @@ def test_test_iam_permissions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).test_iam_permissions._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["resource"] = "resource_value" - jsonified_request["permissions"] = "permissions_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).test_iam_permissions._get_unset_required_fields(jsonified_request) + ).delete_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == "resource_value" - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == "permissions_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12942,7 +16811,7 @@ def test_test_iam_permissions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12951,49 +16820,39 @@ def test_test_iam_permissions_rest_required_fields( with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
- pb_request = request + pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.test_iam_permissions(request) + response = client.delete_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_test_iam_permissions_rest_unset_required_fields(): +def test_delete_backup_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "resource", - "permissions", - ) - ) - ) + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): +def test_delete_backup_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13006,13 +16865,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions" - ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_test_iam_permissions" + transports.DatabaseAdminRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() + pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13023,19 +16879,15 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - iam_policy_pb2.TestIamPermissionsResponse() - ) - request = iam_policy_pb2.TestIamPermissionsRequest() + request = backup.DeleteBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions( + client.delete_backup( request, metadata=[ ("key", "val"), @@ -13044,11 +16896,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_test_iam_permissions_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=backup.DeleteBackupRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13056,7 +16907,7 @@ def test_test_iam_permissions_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"resource": 
"projects/sample1/instances/sample2/databases/sample3"} + request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -13068,10 +16919,10 @@ def test_test_iam_permissions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.test_iam_permissions(request) + client.delete_backup(request) -def test_test_iam_permissions_rest_flattened(): +def test_delete_backup_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13080,41 +16931,37 @@ def test_test_iam_permissions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = { - "resource": "projects/sample1/instances/sample2/databases/sample3" - } + sample_request = {"name": "projects/sample1/instances/sample2/backups/sample3"} # get truthy value for each flattened field mock_args = dict( - resource="resource_value", - permissions=["permissions_value"], + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.test_iam_permissions(**mock_args) + client.delete_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions" - % client.transport._host, + "%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1], ) -def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13123,14 +16970,13 @@ def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource="resource_value", - permissions=["permissions_value"], + client.delete_backup( + backup.DeleteBackupRequest(), + name="name_value", ) -def test_test_iam_permissions_rest_error(): +def test_delete_backup_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13139,11 +16985,11 @@ def test_test_iam_permissions_rest_error(): @pytest.mark.parametrize( "request_type", [ - gsad_backup.CreateBackupRequest, + backup.ListBackupsRequest, dict, ], ) -def test_create_backup_rest(request_type): +def test_list_backups_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13151,128 +16997,32 @@ def test_create_backup_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/instances/sample2"} - request_init["backup"] = { - "database": "database_value", - "version_time": {"seconds": 751, "nanos": 543}, - "expire_time": {}, - "name": "name_value", - "create_time": {}, - "size_bytes": 1089, - "state": 1, - "referencing_databases": [ - "referencing_databases_value1", - "referencing_databases_value2", - ], - "encryption_info": { - "encryption_type": 1, - "encryption_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "kms_key_version": "kms_key_version_value", - }, - "encryption_information": {}, - "database_dialect": 1, - "referencing_backups": [ - "referencing_backups_value1", - "referencing_backups_value2", - ], - "max_expire_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.ListBackupsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_backup(request) + response = client.list_backups(request) # Establish that the response is the type that we expect. 
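        # list_backups returns a ListBackupsPager wrapping the raw
        # ListBackupsResponse; iterating the pager transparently follows
        # next_page_token, e.g. (illustrative):
        #   for b in client.list_backups(parent="projects/p1/instances/i1"):
        #       print(b.name)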
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_backup_rest_use_cached_wrapped_rpc(): +def test_list_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13286,40 +17036,33 @@ def test_create_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.create_backup(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_rest_required_fields( - request_type=gsad_backup.CreateBackupRequest, -): +def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): transport_class = transports.DatabaseAdminRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13327,28 +17070,25 @@ def test_create_backup_rest_required_fields( ) # verify fields with default values are dropped - assert "backupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == request_init["backup_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["backupId"] = "backup_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
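    # (Editorial note) For list_backups only the optional query parameters
    # (filter, page_size, page_token) may legitimately remain unset here;
    # anything else left over would mean a required path or body field was
    # about to leak into the query string.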
assert not set(unset_fields) - set( ( - "backup_id", - "encryption_config", + "filter", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -13356,8 +17096,6 @@ def test_create_backup_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == "backup_id_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13366,7 +17104,7 @@ def test_create_backup_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.ListBackupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13378,57 +17116,48 @@ def test_create_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_backup(request) + response = client.list_backups(request) - expected_params = [ - ( - "backupId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_backup_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup._get_unset_required_fields({}) + unset_fields = transport.list_backups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "backupId", - "encryptionConfig", - ) - ) - & set( - ( - "parent", - "backupId", - "backup", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_rest_interceptors(null_interceptor): +def test_list_backups_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13441,17 +17170,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_create_backup" + transports.DatabaseAdminRestInterceptor, "post_list_backups" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_create_backup" + transports.DatabaseAdminRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gsad_backup.CreateBackupRequest.pb( - gsad_backup.CreateBackupRequest() - ) + pb_message = 
backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13462,19 +17187,19 @@ def test_create_backup_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = backup.ListBackupsResponse.to_json( + backup.ListBackupsResponse() ) - request = gsad_backup.CreateBackupRequest() + request = backup.ListBackupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = backup.ListBackupsResponse() - client.create_backup( + client.list_backups( request, metadata=[ ("key", "val"), @@ -13486,8 +17211,8 @@ def test_create_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_backup_rest_bad_request( - transport: str = "rest", request_type=gsad_backup.CreateBackupRequest +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=backup.ListBackupsRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13507,10 +17232,10 @@ def test_create_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_backup(request) + client.list_backups(request) -def test_create_backup_rest_flattened(): +def test_list_backups_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13519,7 +17244,7 @@ def test_create_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup.ListBackupsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/instances/sample2"} @@ -13527,19 +17252,19 @@ def test_create_backup_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup=gsad_backup.Backup(database="database_value"), - backup_id="backup_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_backup(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -13551,7 +17276,7 @@ def test_create_backup_rest_flattened(): ) -def test_create_backup_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13560,28 +17285,81 @@ def test_create_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_backup( - gsad_backup.CreateBackupRequest(), + client.list_backups( + backup.ListBackupsRequest(), parent="parent_value", - backup=gsad_backup.Backup(database="database_value"), - backup_id="backup_id_value", ) -def test_create_backup_rest_error(): +def test_list_backups_rest_pager(transport: str = "rest"): client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token="abc", + ), + backup.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token="ghi", + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backup.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/instances/sample2"} + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - backup.CopyBackupRequest, + spanner_database_admin.RestoreDatabaseRequest, dict, ], ) -def test_copy_backup_rest(request_type): +def test_restore_database_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13603,13 +17381,13 @@ def test_copy_backup_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.copy_backup(request) + response = client.restore_database(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_copy_backup_rest_use_cached_wrapped_rpc(): +def test_restore_database_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13623,17 +17401,19 @@ def test_copy_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.copy_backup in client._transport._wrapped_methods + assert client._transport.restore_database in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.restore_database + ] = mock_rpc request = {} - client.copy_backup(request) + client.restore_database(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13642,20 +17422,21 @@ def test_copy_backup_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.copy_backup(request) + client.restore_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest): +def test_restore_database_rest_required_fields( + request_type=spanner_database_admin.RestoreDatabaseRequest, +): transport_class = transports.DatabaseAdminRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_id"] = "" - request_init["source_backup"] = "" + request_init["database_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13666,27 +17447,24 @@ def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest) unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).copy_backup._get_unset_required_fields(jsonified_request) + ).restore_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["backupId"] = "backup_id_value" - jsonified_request["sourceBackup"] = "source_backup_value" + jsonified_request["databaseId"] = "database_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).copy_backup._get_unset_required_fields(jsonified_request) + ).restore_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == "backup_id_value" - assert "sourceBackup" in jsonified_request - assert jsonified_request["sourceBackup"] == "source_backup_value" + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == "database_id_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13720,34 +17498,32 @@ def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.copy_backup(request) + response = client.restore_database(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_copy_backup_rest_unset_required_fields(): +def test_restore_database_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.copy_backup._get_unset_required_fields({}) + unset_fields = transport.restore_database._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( "parent", - "backupId", - "sourceBackup", - "expireTime", + "databaseId", ) ) ) 
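(Editorial aside) The `set(...) & set(...)` idiom in the `*_unset_required_fields` tests above encodes a specific contract: the first operand lists the fields the REST transport can inject as default-valued query parameters, the second lists the RPC's required fields, and `_get_unset_required_fields({})` must return exactly their intersection for an empty request. For the removed `create_backup` variant that intersection was `backupId`, which is why `backupId` also appeared in `expected_params`; for `restore_database` and `list_backups` the required fields travel in the URL path, so the intersection is empty. A minimal sketch, assuming nothing beyond the field sets copied from the tests themselves (the helper and dictionaries below are illustrative, not generated code):

    # Editorial sketch -- mirrors the set algebra asserted by the
    # *_unset_required_fields tests; field sets copied from those tests.
    QUERY_PARAM_FIELDS = {
        "restore_database": set(),
        "list_backups": {"filter", "pageSize", "pageToken"},
        "create_backup": {"backupId", "encryptionConfig"},
    }
    REQUIRED_FIELDS = {
        "restore_database": {"parent", "databaseId"},
        "list_backups": {"parent"},
        "create_backup": {"parent", "backupId", "backup"},
    }

    def expected_unset_required_fields(method):
        # Required fields the transport must still fill in as default-valued
        # query parameters when the caller leaves them unset.
        return QUERY_PARAM_FIELDS[method] & REQUIRED_FIELDS[method]

    assert expected_unset_required_fields("restore_database") == set()
    assert expected_unset_required_fields("list_backups") == set()
    assert expected_unset_required_fields("create_backup") == {"backupId"}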
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_copy_backup_rest_interceptors(null_interceptor): +def test_restore_database_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -13762,13 +17538,15 @@ def test_copy_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_copy_backup" + transports.DatabaseAdminRestInterceptor, "post_restore_database" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_copy_backup" + transports.DatabaseAdminRestInterceptor, "pre_restore_database" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backup.CopyBackupRequest.pb(backup.CopyBackupRequest()) + pb_message = spanner_database_admin.RestoreDatabaseRequest.pb( + spanner_database_admin.RestoreDatabaseRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13783,7 +17561,7 @@ def test_copy_backup_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backup.CopyBackupRequest() + request = spanner_database_admin.RestoreDatabaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -13791,7 +17569,7 @@ def test_copy_backup_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.copy_backup( + client.restore_database( request, metadata=[ ("key", "val"), @@ -13803,8 +17581,8 @@ def test_copy_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_copy_backup_rest_bad_request( - transport: str = "rest", request_type=backup.CopyBackupRequest +def test_restore_database_rest_bad_request( + transport: str = "rest", request_type=spanner_database_admin.RestoreDatabaseRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13824,10 +17602,10 @@ def test_copy_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.copy_backup(request) + client.restore_database(request) -def test_copy_backup_rest_flattened(): +def test_restore_database_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13844,9 +17622,7 @@ def test_copy_backup_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - expire_time=timestamp_pb2.Timestamp(seconds=751), + database_id="database_id_value", ) mock_args.update(sample_request) @@ -13857,20 +17633,20 @@ def test_copy_backup_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.copy_backup(**mock_args) + client.restore_database(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
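    # (Editorial note) path_template.validate confirms the flattened arguments
    # produced a URL matching the databases:restore HTTP rule below, replacing
    # the backups:copy template the old test checked.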
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/backups:copy" + "%s/v1/{parent=projects/*/instances/*}/databases:restore" % client.transport._host, args[1], ) -def test_copy_backup_rest_flattened_error(transport: str = "rest"): +def test_restore_database_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13879,16 +17655,15 @@ def test_copy_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.copy_backup( - backup.CopyBackupRequest(), + client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - expire_time=timestamp_pb2.Timestamp(seconds=751), + database_id="database_id_value", + backup="backup_value", ) -def test_copy_backup_rest_error(): +def test_restore_database_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -13897,56 +17672,46 @@ def test_copy_backup_rest_error(): @pytest.mark.parametrize( "request_type", [ - backup.GetBackupRequest, + spanner_database_admin.ListDatabaseOperationsRequest, dict, ], ) -def test_get_backup_rest(request_type): +def test_list_database_operations_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.Backup( - database="database_value", - name="name_value", - size_bytes=1089, - state=backup.Backup.State.CREATING, - referencing_databases=["referencing_databases_value"], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=["referencing_backups_value"], + return_value = spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_backup(request) + response = client.list_database_operations(request) # Establish that the response is the type that we expect. 
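    # (Editorial note) Paged list RPCs surface a *Pager wrapper instead of the
    # raw proto response; the pager proxies the underlying page's fields, so
    # next_page_token is still directly assertable here.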
- assert isinstance(response, backup.Backup) - assert response.database == "database_value" - assert response.name == "name_value" - assert response.size_bytes == 1089 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ["referencing_databases_value"] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ["referencing_backups_value"] + assert isinstance(response, pagers.ListDatabaseOperationsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_backup_rest_use_cached_wrapped_rpc(): +def test_list_database_operations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13960,33 +17725,40 @@ def test_get_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert ( + client._transport.list_database_operations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_database_operations + ] = mock_rpc request = {} - client.get_backup(request) + client.list_database_operations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup(request) + client.list_database_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): +def test_list_database_operations_rest_required_fields( + request_type=spanner_database_admin.ListDatabaseOperationsRequest, +): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13997,21 +17769,29 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).list_database_operations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).list_database_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14020,7 +17800,7 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup.Backup() + return_value = spanner_database_admin.ListDatabaseOperationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14041,30 +17821,41 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_backup(request) + response = client.list_database_operations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_rest_unset_required_fields(): +def test_list_database_operations_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_database_operations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): +def test_list_database_operations_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14077,13 +17868,15 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_get_backup" + transports.DatabaseAdminRestInterceptor, "post_list_database_operations" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_get_backup" + transports.DatabaseAdminRestInterceptor, "pre_list_database_operations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) + pb_message = spanner_database_admin.ListDatabaseOperationsRequest.pb( + spanner_database_admin.ListDatabaseOperationsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14094,17 +17887,21 @@ def test_get_backup_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = 
backup.Backup.to_json(backup.Backup()) + req.return_value._content = ( + spanner_database_admin.ListDatabaseOperationsResponse.to_json( + spanner_database_admin.ListDatabaseOperationsResponse() + ) + ) - request = backup.GetBackupRequest() + request = spanner_database_admin.ListDatabaseOperationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup.Backup() + post.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - client.get_backup( + client.list_database_operations( request, metadata=[ ("key", "val"), @@ -14116,8 +17913,9 @@ def test_get_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_backup_rest_bad_request( - transport: str = "rest", request_type=backup.GetBackupRequest +def test_list_database_operations_rest_bad_request( + transport: str = "rest", + request_type=spanner_database_admin.ListDatabaseOperationsRequest, ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14125,7 +17923,7 @@ def test_get_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14137,10 +17935,10 @@ def test_get_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_backup(request) + client.list_database_operations(request) -def test_get_backup_rest_flattened(): +def test_list_database_operations_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14149,14 +17947,14 @@ def test_get_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.Backup() + return_value = spanner_database_admin.ListDatabaseOperationsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/instances/sample2/backups/sample3"} + sample_request = {"parent": "projects/sample1/instances/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -14164,24 +17962,27 @@ def test_get_backup_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_backup(**mock_args) + client.list_database_operations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, + "%s/v1/{parent=projects/*/instances/*}/databaseOperations" + % client.transport._host, args[1], ) -def test_get_backup_rest_flattened_error(transport: str = "rest"): +def test_list_database_operations_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14190,174 +17991,117 @@ def test_get_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - backup.GetBackupRequest(), - name="name_value", + client.list_database_operations( + spanner_database_admin.ListDatabaseOperationsRequest(), + parent="parent_value", ) -def test_get_backup_rest_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - gsad_backup.UpdateBackupRequest, - dict, - ], -) -def test_update_backup_rest(request_type): +def test_list_database_operations_rest_pager(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} - } - request_init["backup"] = { - "database": "database_value", - "version_time": {"seconds": 751, "nanos": 543}, - "expire_time": {}, - "name": "projects/sample1/instances/sample2/backups/sample3", - "create_time": {}, - "size_bytes": 1089, - "state": 1, - "referencing_databases": [ - "referencing_databases_value1", - "referencing_databases_value2", - ], - "encryption_info": { - "encryption_type": 1, - "encryption_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), ], - }, - "kms_key_version": "kms_key_version_value", - }, - "encryption_information": {}, - "database_dialect": 1, - "referencing_backups": [ - "referencing_backups_value1", - "referencing_backups_value2", - ], - "max_expire_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] + next_page_token="abc", + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token="def", + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token="ghi", + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the values into proper Response objs + response = tuple( + spanner_database_admin.ListDatabaseOperationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = {"parent": "projects/sample1/instances/sample2"} - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_database_operations(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) - subfields_not_in_runtime = [] + pages = list(client.list_database_operations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backup.ListBackupOperationsRequest, + dict, + ], +) +def test_list_backup_operations_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the 
dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup( - database="database_value", - name="name_value", - size_bytes=1089, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=["referencing_databases_value"], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=["referencing_backups_value"], + return_value = backup.ListBackupOperationsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) + return_value = backup.ListBackupOperationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_backup(request) + response = client.list_backup_operations(request) # Establish that the response is the type that we expect. - assert isinstance(response, gsad_backup.Backup) - assert response.database == "database_value" - assert response.name == "name_value" - assert response.size_bytes == 1089 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ["referencing_databases_value"] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ["referencing_backups_value"] + assert isinstance(response, pagers.ListBackupOperationsPager) + assert response.next_page_token == "next_page_token_value" -def test_update_backup_rest_use_cached_wrapped_rpc(): +def test_list_backup_operations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14371,34 +18115,40 @@ def test_update_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods + assert ( + client._transport.list_backup_operations + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_backup_operations + ] = mock_rpc request = {} - client.update_backup(request) + client.list_backup_operations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_backup(request) + client.list_backup_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_backup_rest_required_fields( - request_type=gsad_backup.UpdateBackupRequest, +def test_list_backup_operations_rest_required_fields( + request_type=backup.ListBackupOperationsRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14409,19 +18159,29 @@ def test_update_backup_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).list_backup_operations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).list_backup_operations._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14430,7 +18190,7 @@ def test_update_backup_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup() + return_value = backup.ListBackupOperationsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14442,48 +18202,48 @@ def test_update_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) + return_value = backup.ListBackupOperationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_backup(request) + response = client.list_backup_operations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_backup_rest_unset_required_fields(): +def test_list_backup_operations_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_backup._get_unset_required_fields({}) + unset_fields = transport.list_backup_operations._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) - & set( + set( ( - "backup", - "updateMask", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): +def test_list_backup_operations_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14496,14 +18256,14 @@ def test_update_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_update_backup" + transports.DatabaseAdminRestInterceptor, "post_list_backup_operations" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_update_backup" + transports.DatabaseAdminRestInterceptor, "pre_list_backup_operations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gsad_backup.UpdateBackupRequest.pb( - gsad_backup.UpdateBackupRequest() + pb_message = backup.ListBackupOperationsRequest.pb( + backup.ListBackupOperationsRequest() ) transcode.return_value = { "method": "post", @@ -14515,17 +18275,19 @@ def test_update_backup_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gsad_backup.Backup.to_json(gsad_backup.Backup()) + req.return_value._content = backup.ListBackupOperationsResponse.to_json( + backup.ListBackupOperationsResponse() + ) - request = gsad_backup.UpdateBackupRequest() + request = backup.ListBackupOperationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gsad_backup.Backup() + post.return_value = backup.ListBackupOperationsResponse() - client.update_backup( + client.list_backup_operations( request, metadata=[ ("key", "val"), @@ -14537,8 +18299,8 @@ def test_update_backup_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_update_backup_rest_bad_request( - transport: str = "rest", request_type=gsad_backup.UpdateBackupRequest +def test_list_backup_operations_rest_bad_request( + transport: str = "rest", request_type=backup.ListBackupOperationsRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14546,9 +18308,7 @@ def test_update_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} - } + request_init = {"parent": "projects/sample1/instances/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14560,10 +18320,10 @@ def test_update_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_backup(request) + client.list_backup_operations(request) -def test_update_backup_rest_flattened(): +def test_list_backup_operations_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14572,17 +18332,14 @@ def test_update_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup() + return_value = backup.ListBackupOperationsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} - } + sample_request = {"parent": "projects/sample1/instances/sample2"} # get truthy value for each flattened field mock_args = dict( - backup=gsad_backup.Backup(database="database_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) @@ -14590,25 +18347,25 @@ def test_update_backup_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) + return_value = backup.ListBackupOperationsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_backup(**mock_args) + client.list_backup_operations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{backup.name=projects/*/instances/*/backups/*}" + "%s/v1/{parent=projects/*/instances/*}/backupOperations" % client.transport._host, args[1], ) -def test_update_backup_rest_flattened_error(transport: str = "rest"): +def test_list_backup_operations_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14617,55 +18374,116 @@ def test_update_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database="database_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_backup_operations( + backup.ListBackupOperationsRequest(), + parent="parent_value", ) -def test_update_backup_rest_error(): +def test_list_backup_operations_rest_pager(transport: str = "rest"): client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token="abc", + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token="def", + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token="ghi", + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backup.ListBackupOperationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/instances/sample2"} + + pager = client.list_backup_operations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) + + pages = list(client.list_backup_operations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - backup.DeleteBackupRequest, + spanner_database_admin.ListDatabaseRolesRequest, dict, ], ) -def test_delete_backup_rest(request_type): +def test_list_database_roles_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
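        # (Editorial note) delete_backup replied with an empty body, so the
        # old test mocked None and an empty JSON string; list_database_roles
        # returns a real message, hence the JSON-serialized response below.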
- return_value = None + return_value = spanner_database_admin.ListDatabaseRolesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_backup(request) + response = client.list_database_roles(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListDatabaseRolesPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_backup_rest_use_cached_wrapped_rpc(): +def test_list_database_roles_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14679,33 +18497,39 @@ def test_delete_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert ( + client._transport.list_database_roles in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_database_roles + ] = mock_rpc request = {} - client.delete_backup(request) + client.list_database_roles(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_backup(request) + client.list_database_roles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): +def test_list_database_roles_rest_required_fields( + request_type=spanner_database_admin.ListDatabaseRolesRequest, +): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14716,21 +18540,28 @@ def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).list_database_roles._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).list_database_roles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14739,7 +18570,7 @@ def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = spanner_database_admin.ListDatabaseRolesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14751,36 +18582,49 @@ def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_backup(request) + response = client.list_database_roles(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_rest_unset_required_fields(): +def test_list_database_roles_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_database_roles._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): +def test_list_database_roles_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -14793,10 +18637,15 @@ def test_delete_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_delete_backup" + transports.DatabaseAdminRestInterceptor, "post_list_database_roles" + ) as post, mock.patch.object( + transports.DatabaseAdminRestInterceptor, "pre_list_database_roles" ) as pre: pre.assert_not_called() - pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) + post.assert_not_called() + pb_message = spanner_database_admin.ListDatabaseRolesRequest.pb( + spanner_database_admin.ListDatabaseRolesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14807,15 +18656,21 @@ def test_delete_backup_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
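+        # The post interceptor deserializes the raw HTTP body, so the mocked
+        # response must carry a serialized ListDatabaseRolesResponse payload.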
req.return_value.request = PreparedRequest() + req.return_value._content = ( + spanner_database_admin.ListDatabaseRolesResponse.to_json( + spanner_database_admin.ListDatabaseRolesResponse() + ) + ) - request = backup.DeleteBackupRequest() + request = spanner_database_admin.ListDatabaseRolesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = spanner_database_admin.ListDatabaseRolesResponse() - client.delete_backup( + client.list_database_roles( request, metadata=[ ("key", "val"), @@ -14824,10 +18679,12 @@ def test_delete_backup_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_backup_rest_bad_request( - transport: str = "rest", request_type=backup.DeleteBackupRequest +def test_list_database_roles_rest_bad_request( + transport: str = "rest", + request_type=spanner_database_admin.ListDatabaseRolesRequest, ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14835,7 +18692,7 @@ def test_delete_backup_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/backups/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -14847,10 +18704,10 @@ def test_delete_backup_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_backup(request) + client.list_database_roles(request) -def test_delete_backup_rest_flattened(): +def test_list_database_roles_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14859,37 +18716,42 @@ def test_delete_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = spanner_database_admin.ListDatabaseRolesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/instances/sample2/backups/sample3"} + sample_request = { + "parent": "projects/sample1/instances/sample2/databases/sample3" + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_backup(**mock_args) + client.list_database_roles(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, + "%s/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles" + % client.transport._host, args[1], ) -def test_delete_backup_rest_flattened_error(transport: str = "rest"): +def test_list_database_roles_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14898,59 +18760,206 @@ def test_delete_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup( - backup.DeleteBackupRequest(), - name="name_value", + client.list_database_roles( + spanner_database_admin.ListDatabaseRolesRequest(), + parent="parent_value", ) -def test_delete_backup_rest_error(): +def test_list_database_roles_rest_pager(transport: str = "rest"): client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token="abc", + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token="def", + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token="ghi", + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + spanner_database_admin.ListDatabaseRolesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/instances/sample2/databases/sample3" + } + + pager = client.list_database_roles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner_database_admin.DatabaseRole) for i in results) + + pages = list(client.list_database_roles(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - backup.ListBackupsRequest, + gsad_backup_schedule.CreateBackupScheduleRequest, dict, ], ) -def test_list_backups_rest(request_type): +def test_create_backup_schedule_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/instances/sample2"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2/databases/sample3"} + request_init["backup_schedule"] = { + "name": "name_value", + "spec": { + "cron_spec": { + "text": "text_value", + "time_zone": "time_zone_value", + "creation_window": {"seconds": 751, "nanos": 543}, + } + }, + "retention_duration": {}, + "encryption_config": { + "encryption_type": 1, + "kms_key_name": "kms_key_name_value", + "kms_key_names": ["kms_key_names_value1", "kms_key_names_value2"], + }, + "full_backup_spec": {}, + "update_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup_schedule.CreateBackupScheduleRequest.meta.fields[ + "backup_schedule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse( - next_page_token="next_page_token_value", + return_value = gsad_backup_schedule.BackupSchedule( + name="name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_backups(request) + response = client.create_backup_schedule(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == "name_value" -def test_list_backups_rest_use_cached_wrapped_rpc(): +def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14964,33 +18973,41 @@ def test_list_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods + assert ( + client._transport.create_backup_schedule + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_backup_schedule + ] = mock_rpc request = {} - client.list_backups(request) + client.create_backup_schedule(request) # Establish that the underlying gRPC stub method was called. 
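+        # wrap_method ran only while the client was constructed; the cached
+        # wrapper is reused below, so wrapper_fn stays untouched while the
+        # stub's call_count keeps growing.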
assert mock_rpc.call_count == 1 - client.list_backups(request) + client.create_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): +def test_create_backup_schedule_rest_required_fields( + request_type=gsad_backup_schedule.CreateBackupScheduleRequest, +): transport_class = transports.DatabaseAdminRestTransport request_init = {} request_init["parent"] = "" + request_init["backup_schedule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14998,32 +19015,32 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques ) # verify fields with default values are dropped + assert "backupScheduleId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).create_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupScheduleId" in jsonified_request + assert jsonified_request["backupScheduleId"] == request_init["backup_schedule_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["backupScheduleId"] = "backup_schedule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).create_backup_schedule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("backup_schedule_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "backupScheduleId" in jsonified_request + assert jsonified_request["backupScheduleId"] == "backup_schedule_id_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15032,7 +19049,7 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() + return_value = gsad_backup_schedule.BackupSchedule() # Mock the http request call within the method and fake a response. 
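+    # backup_schedule_id is a required query parameter for this method, so the
+    # expected_params assertion below includes ("backupScheduleId", "") even
+    # though the field is empty in the request.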
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15044,48 +19061,55 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_backups(request) + response = client.create_backup_schedule(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupScheduleId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backups_rest_unset_required_fields(): +def test_create_backup_schedule_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backups._get_unset_required_fields({}) + unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("backupScheduleId",)) + & set( ( - "filter", - "pageSize", - "pageToken", + "parent", + "backupScheduleId", + "backupSchedule", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): +def test_create_backup_schedule_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15098,13 +19122,15 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_list_backups" + transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_list_backups" + transports.DatabaseAdminRestInterceptor, "pre_create_backup_schedule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) + pb_message = gsad_backup_schedule.CreateBackupScheduleRequest.pb( + gsad_backup_schedule.CreateBackupScheduleRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15115,19 +19141,19 @@ def test_list_backups_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backup.ListBackupsResponse.to_json( - backup.ListBackupsResponse() + req.return_value._content = gsad_backup_schedule.BackupSchedule.to_json( + gsad_backup_schedule.BackupSchedule() ) - request = backup.ListBackupsRequest() + request = gsad_backup_schedule.CreateBackupScheduleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup.ListBackupsResponse() + post.return_value = 
gsad_backup_schedule.BackupSchedule() - client.list_backups( + client.create_backup_schedule( request, metadata=[ ("key", "val"), @@ -15139,8 +19165,9 @@ def test_list_backups_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_backups_rest_bad_request( - transport: str = "rest", request_type=backup.ListBackupsRequest +def test_create_backup_schedule_rest_bad_request( + transport: str = "rest", + request_type=gsad_backup_schedule.CreateBackupScheduleRequest, ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15148,7 +19175,7 @@ def test_list_backups_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"parent": "projects/sample1/instances/sample2/databases/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15160,10 +19187,10 @@ def test_list_backups_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_backups(request) + client.create_backup_schedule(request) -def test_list_backups_rest_flattened(): +def test_create_backup_schedule_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15172,14 +19199,18 @@ def test_list_backups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() + return_value = gsad_backup_schedule.BackupSchedule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = { + "parent": "projects/sample1/instances/sample2/databases/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + backup_schedule_id="backup_schedule_id_value", ) mock_args.update(sample_request) @@ -15187,24 +19218,25 @@ def test_list_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_backups(**mock_args) + client.create_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, + "%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" + % client.transport._host, args[1], ) -def test_list_backups_rest_flattened_error(transport: str = "rest"): +def test_create_backup_schedule_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15213,109 +19245,63 @@ def test_list_backups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
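+    # Mixing the two calling conventions is ambiguous, so the client raises
+    # ValueError before any HTTP request is attempted.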
with pytest.raises(ValueError): - client.list_backups( - backup.ListBackupsRequest(), + client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + backup_schedule_id="backup_schedule_id_value", ) -def test_list_backups_rest_pager(transport: str = "rest"): +def test_create_backup_schedule_rest_error(): client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token="abc", - ), - backup.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token="ghi", - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backup.ListBackupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/instances/sample2"} - - pager = client.list_backups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup.Backup) for i in results) - - pages = list(client.list_backups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.RestoreDatabaseRequest, + backup_schedule.GetBackupScheduleRequest, dict, ], ) -def test_restore_database_rest(request_type): +def test_get_backup_schedule_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
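+        # get_backup_schedule returns the BackupSchedule resource directly
+        # rather than a long-running Operation.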
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_schedule.BackupSchedule( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restore_database(request) + response = client.get_backup_schedule(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, backup_schedule.BackupSchedule) + assert response.name == "name_value" -def test_restore_database_rest_use_cached_wrapped_rpc(): +def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15329,7 +19315,9 @@ def test_restore_database_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods + assert ( + client._transport.get_backup_schedule in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -15337,34 +19325,29 @@ def test_restore_database_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.restore_database + client._transport.get_backup_schedule ] = mock_rpc request = {} - client.restore_database(request) + client.get_backup_schedule(request) # Establish that the underlying gRPC stub method was called. 
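+        # get_backup_schedule is a plain unary call, so no operation wrapper
+        # is built on the first invocation and wrapper_fn needs no reset here.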
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) + client.get_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_restore_database_rest_required_fields( - request_type=spanner_database_admin.RestoreDatabaseRequest, +def test_get_backup_schedule_rest_required_fields( + request_type=backup_schedule.GetBackupScheduleRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15375,24 +19358,21 @@ def test_restore_database_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_database._get_unset_required_fields(jsonified_request) + ).get_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["databaseId"] = "database_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_database._get_unset_required_fields(jsonified_request) + ).get_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == "database_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15401,7 +19381,7 @@ def test_restore_database_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_schedule.BackupSchedule() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15413,45 +19393,39 @@ def test_restore_database_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restore_database(request) + response = client.get_backup_schedule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_restore_database_rest_unset_required_fields(): +def test_get_backup_schedule_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.restore_database._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "databaseId", - ) - ) - ) + unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_database_rest_interceptors(null_interceptor): +def test_get_backup_schedule_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15464,16 +19438,14 @@ def test_restore_database_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_restore_database" + transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_restore_database" + transports.DatabaseAdminRestInterceptor, "pre_get_backup_schedule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.RestoreDatabaseRequest.pb( - spanner_database_admin.RestoreDatabaseRequest() + pb_message = backup_schedule.GetBackupScheduleRequest.pb( + backup_schedule.GetBackupScheduleRequest() ) transcode.return_value = { "method": "post", @@ -15485,19 +19457,19 @@ def test_restore_database_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = backup_schedule.BackupSchedule.to_json( + backup_schedule.BackupSchedule() ) - request = spanner_database_admin.RestoreDatabaseRequest() + request = backup_schedule.GetBackupScheduleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = backup_schedule.BackupSchedule() - client.restore_database( + client.get_backup_schedule( request, metadata=[ ("key", "val"), @@ -15509,8 +19481,8 @@ def 
test_restore_database_rest_interceptors(null_interceptor): post.assert_called_once() -def test_restore_database_rest_bad_request( - transport: str = "rest", request_type=spanner_database_admin.RestoreDatabaseRequest +def test_get_backup_schedule_rest_bad_request( + transport: str = "rest", request_type=backup_schedule.GetBackupScheduleRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15518,7 +19490,9 @@ def test_restore_database_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15530,10 +19504,10 @@ def test_restore_database_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.restore_database(request) + client.get_backup_schedule(request) -def test_restore_database_rest_flattened(): +def test_get_backup_schedule_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15542,39 +19516,42 @@ def test_restore_database_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backup_schedule.BackupSchedule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - database_id="database_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.restore_database(**mock_args) + client.get_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/databases:restore" + "%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1], ) -def test_restore_database_rest_flattened_error(transport: str = "rest"): +def test_get_backup_schedule_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15583,15 +19560,13 @@ def test_restore_database_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent="parent_value", - database_id="database_id_value", - backup="backup_value", + client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name="name_value", ) -def test_restore_database_rest_error(): +def test_get_backup_schedule_rest_error(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15600,46 +19575,135 @@ def test_restore_database_rest_error(): @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.ListDatabaseOperationsRequest, + gsad_backup_schedule.UpdateBackupScheduleRequest, dict, ], ) -def test_list_database_operations_rest(request_type): +def test_update_backup_schedule_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = { + "backup_schedule": { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } + } + request_init["backup_schedule"] = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4", + "spec": { + "cron_spec": { + "text": "text_value", + "time_zone": "time_zone_value", + "creation_window": {"seconds": 751, "nanos": 543}, + } + }, + "retention_duration": {}, + "encryption_config": { + "encryption_type": 1, + "kms_key_name": "kms_key_name_value", + "kms_key_names": ["kms_key_names_value1", "kms_key_names_value2"], + }, + "full_backup_spec": {}, + "update_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup_schedule.UpdateBackupScheduleRequest.meta.fields[ + "backup_schedule" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
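+        # (proto-plus message classes lack DESCRIPTOR and expose their fields
+        # through .meta instead, which is how the two cases are told apart)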
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token="next_page_token_value", + return_value = gsad_backup_schedule.BackupSchedule( + name="name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( - return_value - ) + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_database_operations(request) + response = client.update_backup_schedule(request) # Establish that the response is the type that we expect. 
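+    # update_backup_schedule returns the updated BackupSchedule resource
+    # itself, not a pager or a long-running Operation.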
- assert isinstance(response, pagers.ListDatabaseOperationsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == "name_value" -def test_list_database_operations_rest_use_cached_wrapped_rpc(): +def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15654,7 +19718,7 @@ def test_list_database_operations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_database_operations + client._transport.update_backup_schedule in client._transport._wrapped_methods ) @@ -15664,29 +19728,28 @@ def test_list_database_operations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_database_operations + client._transport.update_backup_schedule ] = mock_rpc request = {} - client.list_database_operations(request) + client.update_backup_schedule(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_database_operations(request) + client.update_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_database_operations_rest_required_fields( - request_type=spanner_database_admin.ListDatabaseOperationsRequest, +def test_update_backup_schedule_rest_required_fields( + request_type=gsad_backup_schedule.UpdateBackupScheduleRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15697,29 +19760,19 @@ def test_list_database_operations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_database_operations._get_unset_required_fields(jsonified_request) + ).update_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_database_operations._get_unset_required_fields(jsonified_request) + ).update_backup_schedule._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15728,7 +19781,7 @@ def test_list_database_operations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse() + return_value = gsad_backup_schedule.BackupSchedule() # Mock the http request call within the method and fake a response. 
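+    # update_backup_schedule transcodes to an HTTP PATCH whose body is the
+    # BackupSchedule message; update_mask travels as a query parameter.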
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15740,50 +19793,48 @@ def test_list_database_operations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( - return_value - ) + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_database_operations(request) + response = client.update_backup_schedule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_database_operations_rest_unset_required_fields(): +def test_update_backup_schedule_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_database_operations._get_unset_required_fields({}) + unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("updateMask",)) + & set( ( - "filter", - "pageSize", - "pageToken", + "backupSchedule", + "updateMask", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_database_operations_rest_interceptors(null_interceptor): +def test_update_backup_schedule_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15796,14 +19847,14 @@ def test_list_database_operations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_list_database_operations" + transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_list_database_operations" + transports.DatabaseAdminRestInterceptor, "pre_update_backup_schedule" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.ListDatabaseOperationsRequest.pb( - spanner_database_admin.ListDatabaseOperationsRequest() + pb_message = gsad_backup_schedule.UpdateBackupScheduleRequest.pb( + gsad_backup_schedule.UpdateBackupScheduleRequest() ) transcode.return_value = { "method": "post", @@ -15815,21 +19866,19 @@ def test_list_database_operations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - spanner_database_admin.ListDatabaseOperationsResponse.to_json( - spanner_database_admin.ListDatabaseOperationsResponse() - ) + req.return_value._content = gsad_backup_schedule.BackupSchedule.to_json( + gsad_backup_schedule.BackupSchedule() ) - request = spanner_database_admin.ListDatabaseOperationsRequest() + request = gsad_backup_schedule.UpdateBackupScheduleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata - post.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + post.return_value = gsad_backup_schedule.BackupSchedule() - client.list_database_operations( + client.update_backup_schedule( request, metadata=[ ("key", "val"), @@ -15841,9 +19890,9 @@ def test_list_database_operations_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_database_operations_rest_bad_request( +def test_update_backup_schedule_rest_bad_request( transport: str = "rest", - request_type=spanner_database_admin.ListDatabaseOperationsRequest, + request_type=gsad_backup_schedule.UpdateBackupScheduleRequest, ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15851,7 +19900,11 @@ def test_list_database_operations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = { + "backup_schedule": { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15863,10 +19916,10 @@ def test_list_database_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_database_operations(request) + client.update_backup_schedule(request) -def test_list_database_operations_rest_flattened(): +def test_update_backup_schedule_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15875,14 +19928,19 @@ def test_list_database_operations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse() + return_value = gsad_backup_schedule.BackupSchedule() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = { + "backup_schedule": { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -15890,27 +19948,25 @@ def test_list_database_operations_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( - return_value - ) + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_database_operations(**mock_args) + client.update_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
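+        # The PATCH URI is derived from the nested backup_schedule.name field,
+        # as the path_template assertion below verifies.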
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/databaseOperations" + "%s/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1], ) -def test_list_database_operations_rest_flattened_error(transport: str = "rest"): +def test_update_backup_schedule_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15919,117 +19975,57 @@ def test_list_database_operations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent="parent_value", + client.update_backup_schedule( + gsad_backup_schedule.UpdateBackupScheduleRequest(), + backup_schedule=gsad_backup_schedule.BackupSchedule(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_database_operations_rest_pager(transport: str = "rest"): +def test_update_backup_schedule_rest_error(): client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token="abc", - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token="def", - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token="ghi", - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - spanner_database_admin.ListDatabaseOperationsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/instances/sample2"} - - pager = client.list_database_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) for i in results) - - pages = list(client.list_database_operations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - backup.ListBackupOperationsRequest, + backup_schedule.DeleteBackupScheduleRequest, dict, ], ) -def test_list_backup_operations_rest(request_type): +def test_delete_backup_schedule_rest(request_type): client = DatabaseAdminClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_backup_operations(request) + response = client.delete_backup_schedule(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupOperationsPager) - assert response.next_page_token == "next_page_token_value" + assert response is None -def test_list_backup_operations_rest_use_cached_wrapped_rpc(): +def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16044,7 +20040,7 @@ def test_list_backup_operations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_backup_operations + client._transport.delete_backup_schedule in client._transport._wrapped_methods ) @@ -16054,29 +20050,29 @@ def test_list_backup_operations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_operations + client._transport.delete_backup_schedule ] = mock_rpc request = {} - client.list_backup_operations(request) + client.delete_backup_schedule(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_operations(request) + client.delete_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backup_operations_rest_required_fields( - request_type=backup.ListBackupOperationsRequest, +def test_delete_backup_schedule_rest_required_fields( + request_type=backup_schedule.DeleteBackupScheduleRequest, ): transport_class = transports.DatabaseAdminRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16087,29 +20083,21 @@ def test_list_backup_operations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_operations._get_unset_required_fields(jsonified_request) + ).delete_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).delete_backup_schedule._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16118,7 +20106,7 @@ def test_list_backup_operations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16130,48 +20118,36 @@ def test_list_backup_operations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_backup_operations(request) + response = client.delete_backup_schedule(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backup_operations_rest_unset_required_fields(): +def test_delete_backup_schedule_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backup_operations._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_operations_rest_interceptors(null_interceptor): +def test_delete_backup_schedule_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16184,14 +20160,11 @@ def test_list_backup_operations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_list_backup_operations" - ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_list_backup_operations" + transports.DatabaseAdminRestInterceptor, "pre_delete_backup_schedule" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = backup.ListBackupOperationsRequest.pb( - backup.ListBackupOperationsRequest() + pb_message = backup_schedule.DeleteBackupScheduleRequest.pb( + backup_schedule.DeleteBackupScheduleRequest() ) transcode.return_value = { "method": "post", @@ -16203,19 +20176,15 @@ def test_list_backup_operations_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backup.ListBackupOperationsResponse.to_json( - backup.ListBackupOperationsResponse() - ) - request = backup.ListBackupOperationsRequest() + request = backup_schedule.DeleteBackupScheduleRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backup.ListBackupOperationsResponse() - client.list_backup_operations( + client.delete_backup_schedule( request, metadata=[ ("key", "val"), @@ -16224,11 +20193,10 @@ def test_list_backup_operations_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_list_backup_operations_rest_bad_request( - transport: str = "rest", 
request_type=backup.ListBackupOperationsRequest +def test_delete_backup_schedule_rest_bad_request( + transport: str = "rest", request_type=backup_schedule.DeleteBackupScheduleRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16236,7 +20204,9 @@ def test_list_backup_operations_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16248,10 +20218,10 @@ def test_list_backup_operations_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_backup_operations(request) + client.delete_backup_schedule(request) -def test_list_backup_operations_rest_flattened(): +def test_delete_backup_schedule_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16260,40 +20230,40 @@ def test_list_backup_operations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = { + "name": "projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_backup_operations(**mock_args) + client.delete_backup_schedule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*}/backupOperations" + "%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1], ) -def test_list_backup_operations_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_schedule_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16302,83 +20272,26 @@ def test_list_backup_operations_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_backup_operations( - backup.ListBackupOperationsRequest(), - parent="parent_value", + client.delete_backup_schedule( + backup_schedule.DeleteBackupScheduleRequest(), + name="name_value", ) -def test_list_backup_operations_rest_pager(transport: str = "rest"): +def test_delete_backup_schedule_rest_error(): client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token="abc", - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token="def", - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token="ghi", - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backup.ListBackupOperationsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/instances/sample2"} - - pager = client.list_backup_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) for i in results) - - pages = list(client.list_backup_operations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - spanner_database_admin.ListDatabaseRolesRequest, + backup_schedule.ListBackupSchedulesRequest, dict, ], ) -def test_list_database_roles_rest(request_type): +def test_list_backup_schedules_rest(request_type): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16391,7 +20304,7 @@ def test_list_database_roles_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = spanner_database_admin.ListDatabaseRolesResponse( + return_value = backup_schedule.ListBackupSchedulesResponse( next_page_token="next_page_token_value", ) @@ -16399,19 +20312,19 @@ def test_list_database_roles_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_database_roles(request) + response = client.list_backup_schedules(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseRolesPager) + assert isinstance(response, pagers.ListBackupSchedulesPager) assert response.next_page_token == "next_page_token_value" -def test_list_database_roles_rest_use_cached_wrapped_rpc(): +def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16426,7 +20339,8 @@ def test_list_database_roles_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_database_roles in client._transport._wrapped_methods + client._transport.list_backup_schedules + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16435,24 +20349,24 @@ def test_list_database_roles_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_database_roles + client._transport.list_backup_schedules ] = mock_rpc request = {} - client.list_database_roles(request) + client.list_backup_schedules(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_database_roles(request) + client.list_backup_schedules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_database_roles_rest_required_fields( - request_type=spanner_database_admin.ListDatabaseRolesRequest, +def test_list_backup_schedules_rest_required_fields( + request_type=backup_schedule.ListBackupSchedulesRequest, ): transport_class = transports.DatabaseAdminRestTransport @@ -16468,7 +20382,7 @@ def test_list_database_roles_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_database_roles._get_unset_required_fields(jsonified_request) + ).list_backup_schedules._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16477,7 +20391,7 @@ def test_list_database_roles_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_database_roles._get_unset_required_fields(jsonified_request) + ).list_backup_schedules._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( @@ -16498,7 +20412,7 @@ def test_list_database_roles_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse() + return_value = backup_schedule.ListBackupSchedulesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16519,27 +20433,25 @@ def test_list_database_roles_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb( - return_value - ) + return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_database_roles(request) + response = client.list_backup_schedules(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_database_roles_rest_unset_required_fields(): +def test_list_backup_schedules_rest_unset_required_fields(): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_database_roles._get_unset_required_fields({}) + unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -16552,7 +20464,7 @@ def test_list_database_roles_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_database_roles_rest_interceptors(null_interceptor): +def test_list_backup_schedules_rest_interceptors(null_interceptor): transport = transports.DatabaseAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16565,14 +20477,14 @@ def test_list_database_roles_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "post_list_database_roles" + transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules" ) as post, mock.patch.object( - transports.DatabaseAdminRestInterceptor, "pre_list_database_roles" + transports.DatabaseAdminRestInterceptor, "pre_list_backup_schedules" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = spanner_database_admin.ListDatabaseRolesRequest.pb( - spanner_database_admin.ListDatabaseRolesRequest() + pb_message = backup_schedule.ListBackupSchedulesRequest.pb( + backup_schedule.ListBackupSchedulesRequest() ) transcode.return_value = { "method": "post", @@ -16584,21 +20496,19 @@ def test_list_database_roles_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - spanner_database_admin.ListDatabaseRolesResponse.to_json( - spanner_database_admin.ListDatabaseRolesResponse() - ) + req.return_value._content = backup_schedule.ListBackupSchedulesResponse.to_json( + backup_schedule.ListBackupSchedulesResponse() ) - request = spanner_database_admin.ListDatabaseRolesRequest() + request = backup_schedule.ListBackupSchedulesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabaseRolesResponse() + post.return_value = backup_schedule.ListBackupSchedulesResponse() - client.list_database_roles( + client.list_backup_schedules( request, metadata=[ ("key", "val"), @@ -16610,9 +20520,8 @@ def test_list_database_roles_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_database_roles_rest_bad_request( - transport: str = "rest", - request_type=spanner_database_admin.ListDatabaseRolesRequest, +def test_list_backup_schedules_rest_bad_request( + transport: str = "rest", request_type=backup_schedule.ListBackupSchedulesRequest ): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16632,10 +20541,10 @@ def test_list_database_roles_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_database_roles(request) + client.list_backup_schedules(request) -def test_list_database_roles_rest_flattened(): +def test_list_backup_schedules_rest_flattened(): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16644,7 +20553,7 @@ def test_list_database_roles_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse() + return_value = backup_schedule.ListBackupSchedulesResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -16661,25 +20570,25 @@ def test_list_database_roles_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_database_roles(**mock_args) + client.list_backup_schedules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles" + "%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1], ) -def test_list_database_roles_rest_flattened_error(transport: str = "rest"): +def test_list_backup_schedules_rest_flattened_error(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16688,13 +20597,13 @@ def test_list_database_roles_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_database_roles( - spanner_database_admin.ListDatabaseRolesRequest(), + client.list_backup_schedules( + backup_schedule.ListBackupSchedulesRequest(), parent="parent_value", ) -def test_list_database_roles_rest_pager(transport: str = "rest"): +def test_list_backup_schedules_rest_pager(transport: str = "rest"): client = DatabaseAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16706,28 +20615,28 @@ def test_list_database_roles_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), ], next_page_token="abc", ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], next_page_token="def", ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), ], next_page_token="ghi", ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), ], ), ) @@ -16736,8 +20645,7 @@ def test_list_database_roles_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - spanner_database_admin.ListDatabaseRolesResponse.to_json(x) - for x in response + backup_schedule.ListBackupSchedulesResponse.to_json(x) for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -16749,13 +20657,13 @@ def test_list_database_roles_rest_pager(transport: str = "rest"): "parent": "projects/sample1/instances/sample2/databases/sample3" } - pager = client.list_database_roles(request=sample_request) + pager = client.list_backup_schedules(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) for i in results) + assert all(isinstance(i, backup_schedule.BackupSchedule) for i in results) - pages = list(client.list_database_roles(request=sample_request).pages) + pages = list(client.list_backup_schedules(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -16919,6 +20827,11 @@ def test_database_admin_base_transport(): "list_database_operations", "list_backup_operations", "list_database_roles", + "create_backup_schedule", + "get_backup_schedule", + "update_backup_schedule", + "delete_backup_schedule", + "list_backup_schedules", "get_operation", "cancel_operation", "delete_operation", @@ -17275,6 +21188,21 @@ def test_database_admin_client_transport_session_collision(transport_name): session1 = client1.transport.list_database_roles._session session2 = client2.transport.list_database_roles._session assert session1 != session2 + session1 = 
client1.transport.create_backup_schedule._session + session2 = client2.transport.create_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.get_backup_schedule._session + session2 = client2.transport.get_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.update_backup_schedule._session + session2 = client2.transport.update_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.delete_backup_schedule._session + session2 = client2.transport.delete_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.list_backup_schedules._session + session2 = client2.transport.list_backup_schedules._session + assert session1 != session2 def test_database_admin_grpc_transport_channel(): @@ -17461,11 +21389,42 @@ def test_parse_backup_path(): assert expected == actual -def test_crypto_key_path(): +def test_backup_schedule_path(): project = "cuttlefish" - location = "mussel" - key_ring = "winkle" - crypto_key = "nautilus" + instance = "mussel" + database = "winkle" + schedule = "nautilus" + expected = "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format( + project=project, + instance=instance, + database=database, + schedule=schedule, + ) + actual = DatabaseAdminClient.backup_schedule_path( + project, instance, database, schedule + ) + assert expected == actual + + +def test_parse_backup_schedule_path(): + expected = { + "project": "scallop", + "instance": "abalone", + "database": "squid", + "schedule": "clam", + } + path = DatabaseAdminClient.backup_schedule_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_backup_schedule_path(path) + assert expected == actual + + +def test_crypto_key_path(): + project = "whelk" + location = "octopus" + key_ring = "oyster" + crypto_key = "nudibranch" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( project=project, location=location, @@ -17480,10 +21439,10 @@ def test_crypto_key_path(): def test_parse_crypto_key_path(): expected = { - "project": "scallop", - "location": "abalone", - "key_ring": "squid", - "crypto_key": "clam", + "project": "cuttlefish", + "location": "mussel", + "key_ring": "winkle", + "crypto_key": "nautilus", } path = DatabaseAdminClient.crypto_key_path(**expected) @@ -17493,11 +21452,11 @@ def test_parse_crypto_key_path(): def test_crypto_key_version_path(): - project = "whelk" - location = "octopus" - key_ring = "oyster" - crypto_key = "nudibranch" - crypto_key_version = "cuttlefish" + project = "scallop" + location = "abalone" + key_ring = "squid" + crypto_key = "clam" + crypto_key_version = "whelk" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -17513,11 +21472,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "mussel", - "location": "winkle", - "key_ring": "nautilus", - "crypto_key": "scallop", - "crypto_key_version": "abalone", + "project": "octopus", + "location": "oyster", + "key_ring": "nudibranch", + "crypto_key": "cuttlefish", + "crypto_key_version": "mussel", } path = DatabaseAdminClient.crypto_key_version_path(**expected) @@ -17527,9 +21486,9 @@ def test_parse_crypto_key_version_path(): def test_database_path(): - project = "squid" - instance = "clam" - database = 
"whelk" + project = "winkle" + instance = "nautilus" + database = "scallop" expected = "projects/{project}/instances/{instance}/databases/{database}".format( project=project, instance=instance, @@ -17541,9 +21500,9 @@ def test_database_path(): def test_parse_database_path(): expected = { - "project": "octopus", - "instance": "oyster", - "database": "nudibranch", + "project": "abalone", + "instance": "squid", + "database": "clam", } path = DatabaseAdminClient.database_path(**expected) @@ -17553,10 +21512,10 @@ def test_parse_database_path(): def test_database_role_path(): - project = "cuttlefish" - instance = "mussel" - database = "winkle" - role = "nautilus" + project = "whelk" + instance = "octopus" + database = "oyster" + role = "nudibranch" expected = "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format( project=project, instance=instance, @@ -17569,10 +21528,10 @@ def test_database_role_path(): def test_parse_database_role_path(): expected = { - "project": "scallop", - "instance": "abalone", - "database": "squid", - "role": "clam", + "project": "cuttlefish", + "instance": "mussel", + "database": "winkle", + "role": "nautilus", } path = DatabaseAdminClient.database_role_path(**expected) @@ -17582,8 +21541,8 @@ def test_parse_database_role_path(): def test_instance_path(): - project = "whelk" - instance = "octopus" + project = "scallop" + instance = "abalone" expected = "projects/{project}/instances/{instance}".format( project=project, instance=instance, @@ -17594,8 +21553,8 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "oyster", - "instance": "nudibranch", + "project": "squid", + "instance": "clam", } path = DatabaseAdminClient.instance_path(**expected) @@ -17605,7 +21564,7 @@ def test_parse_instance_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -17615,7 +21574,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = DatabaseAdminClient.common_billing_account_path(**expected) @@ -17625,7 +21584,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -17635,7 +21594,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = DatabaseAdminClient.common_folder_path(**expected) @@ -17645,7 +21604,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -17655,7 +21614,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = DatabaseAdminClient.common_organization_path(**expected) @@ -17665,7 +21624,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -17675,7 +21634,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = 
DatabaseAdminClient.common_project_path(**expected) @@ -17685,8 +21644,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -17697,8 +21656,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = DatabaseAdminClient.common_location_path(**expected) diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index 77ac0d813b..138f02f9a4 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -1313,12 +1313,7 @@ async def test_list_instance_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_configs ] = mock_object @@ -1568,13 +1563,13 @@ def test_list_instance_configs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instance_configs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1931,12 +1926,7 @@ async def test_get_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_config ] = mock_object @@ -2342,12 +2332,7 @@ async def test_create_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_instance_config ] = mock_object @@ -2751,12 +2736,7 @@ async def test_update_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_instance_config ] = mock_object @@ -3150,12 +3130,7 @@ async def test_delete_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance_config ] = mock_object @@ -3538,12 +3513,7 @@ async def 
test_list_instance_config_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_config_operations ] = mock_object @@ -3799,13 +3769,13 @@ def test_list_instance_config_operations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instance_config_operations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4129,12 +4099,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_instances ] = mock_object @@ -4374,13 +4339,13 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4709,12 +4674,7 @@ async def test_list_instance_partitions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_partitions ] = mock_object @@ -4966,13 +4926,13 @@ def test_list_instance_partitions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instance_partitions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5311,12 +5271,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_instance ] = mock_object @@ -5694,12 +5649,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_instance ] = mock_object @@ -6080,12 +6030,7 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # 
Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.update_instance ] = mock_object @@ -6454,12 +6399,7 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance ] = mock_object @@ -6818,12 +6758,7 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy ] = mock_object @@ -7206,12 +7141,7 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy ] = mock_object @@ -7602,12 +7532,7 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions ] = mock_object @@ -8043,12 +7968,7 @@ async def test_get_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_partition ] = mock_object @@ -8449,12 +8369,7 @@ async def test_create_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_instance_partition ] = mock_object @@ -8866,12 +8781,7 @@ async def test_delete_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance_partition ] = mock_object @@ -9245,12 +9155,7 @@ async def test_update_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ 
client._client._transport.update_instance_partition ] = mock_object @@ -9676,12 +9581,7 @@ async def test_list_instance_partition_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_partition_operations ] = mock_object @@ -9943,13 +9843,13 @@ def test_list_instance_partition_operations_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instance_partition_operations(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index 6474ed0606..fe59c2387d 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -1235,12 +1235,7 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.create_session ] = mock_object @@ -1612,12 +1607,7 @@ async def test_batch_create_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_sessions ] = mock_object @@ -2004,12 +1994,7 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.get_session ] = mock_object @@ -2377,12 +2362,7 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions ] = mock_object @@ -2619,13 +2599,13 @@ def test_list_sessions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("database", ""),)), ) pager = client.list_sessions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2929,12 +2909,7 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - 
- mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.delete_session ] = mock_object @@ -3286,12 +3261,7 @@ async def test_execute_sql_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.execute_sql ] = mock_object @@ -3582,12 +3552,7 @@ async def test_execute_streaming_sql_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.execute_streaming_sql ] = mock_object @@ -3880,12 +3845,7 @@ async def test_execute_batch_dml_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.execute_batch_dml ] = mock_object @@ -4166,12 +4126,7 @@ async def test_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.read ] = mock_object @@ -4451,12 +4406,7 @@ async def test_streaming_read_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.streaming_read ] = mock_object @@ -4748,12 +4698,7 @@ async def test_begin_transaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.begin_transaction ] = mock_object @@ -5154,12 +5099,7 @@ async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.commit ] = mock_object @@ -5567,12 +5507,7 @@ async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ client._client._transport.rollback ] = mock_object @@ -5934,12 +5869,7 @@ async def test_partition_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function 
with mock
-        class AwaitableMock(mock.AsyncMock):
-            def __await__(self):
-                self.await_count += 1
-                return iter([])
-
-        mock_object = AwaitableMock()
+        mock_object = mock.AsyncMock()
         client._client._transport._wrapped_methods[
             client._client._transport.partition_query
         ] = mock_object
@@ -6217,12 +6147,7 @@ async def test_partition_read_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        class AwaitableMock(mock.AsyncMock):
-            def __await__(self):
-                self.await_count += 1
-                return iter([])
-
-        mock_object = AwaitableMock()
+        mock_object = mock.AsyncMock()
         client._client._transport._wrapped_methods[
             client._client._transport.partition_read
         ] = mock_object
@@ -6498,12 +6423,7 @@ async def test_batch_write_async_use_cached_wrapped_rpc(
         )

         # Replace cached wrapped function with mock
-        class AwaitableMock(mock.AsyncMock):
-            def __await__(self):
-                self.await_count += 1
-                return iter([])
-
-        mock_object = AwaitableMock()
+        mock_object = mock.AsyncMock()
         client._client._transport._wrapped_methods[
             client._client._transport.batch_write
         ] = mock_object

From fe20d41e6ee333290fa461f40811c791ca1655fd Mon Sep 17 00:00:00 2001
From: Sanjeev Bhatt
Date: Mon, 22 Jul 2024 10:25:04 +0530
Subject: [PATCH 06/10] chore(spanner): Issue#1163 Remove dependency of spanner dbapi from spanner_v1 (#1164)

- Moved BatchTransactionId dataclass from spanner_dbapi to spanner_v1.transaction

Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com>
---
 google/cloud/spanner_dbapi/partition_helper.py | 9 ++-------
 google/cloud/spanner_v1/__init__.py            | 2 ++
 google/cloud/spanner_v1/database.py            | 2 +-
 google/cloud/spanner_v1/transaction.py         | 9 +++++++++
 4 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/google/cloud/spanner_dbapi/partition_helper.py b/google/cloud/spanner_dbapi/partition_helper.py
index 94b396c801..a130e29721 100644
--- a/google/cloud/spanner_dbapi/partition_helper.py
+++ b/google/cloud/spanner_dbapi/partition_helper.py
@@ -19,6 +19,8 @@
 import pickle
 import base64

+from google.cloud.spanner_v1 import BatchTransactionId
+

 def decode_from_string(encoded_partition_id):
     gzip_bytes = base64.b64decode(bytes(encoded_partition_id, "utf-8"))
@@ -33,13 +35,6 @@ def encode_to_string(batch_transaction_id, partition_result):
     return str(base64.b64encode(gzip_bytes), "utf-8")


-@dataclass
-class BatchTransactionId:
-    transaction_id: str
-    session_id: str
-    read_timestamp: Any
-
-
 @dataclass
 class PartitionId:
     batch_transaction_id: BatchTransactionId
diff --git a/google/cloud/spanner_v1/__init__.py b/google/cloud/spanner_v1/__init__.py
index deba096163..d2e7a23938 100644
--- a/google/cloud/spanner_v1/__init__.py
+++ b/google/cloud/spanner_v1/__init__.py
@@ -64,6 +64,7 @@ from .types.type import TypeAnnotationCode
 from .types.type import TypeCode
 from .data_types import JsonObject
+from .transaction import BatchTransactionId

 from google.cloud.spanner_v1 import param_types
 from google.cloud.spanner_v1.client import Client
@@ -147,4 +148,5 @@
     # google.cloud.spanner_v1.services
     "SpannerClient",
     "SpannerAsyncClient",
+    "BatchTransactionId",
 )
diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py
index 5b7c27b236..6bd4f3703e 100644
--- a/google/cloud/spanner_v1/database.py
+++ b/google/cloud/spanner_v1/database.py
@@ -40,7 +40,7 @@ from google.cloud.spanner_admin_database_v1 import RestoreDatabaseRequest
 from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlRequest
 from google.cloud.spanner_admin_database_v1.types import 
DatabaseDialect -from google.cloud.spanner_dbapi.partition_helper import BatchTransactionId +from google.cloud.spanner_v1.transaction import BatchTransactionId from google.cloud.spanner_v1 import ExecuteSqlRequest from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py index ee1dd8ef3b..c872cc380d 100644 --- a/google/cloud/spanner_v1/transaction.py +++ b/google/cloud/spanner_v1/transaction.py @@ -36,6 +36,8 @@ from google.cloud.spanner_v1 import RequestOptions from google.api_core import gapic_v1 from google.api_core.exceptions import InternalServerError +from dataclasses import dataclass +from typing import Any class Transaction(_SnapshotBase, _BatchBase): @@ -554,3 +556,10 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() else: self.rollback() + + +@dataclass +class BatchTransactionId: + transaction_id: str + session_id: str + read_timestamp: Any From ec96d36fca020e17ebb864dab96c5f25e5100a30 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 29 Jul 2024 14:06:56 +0200 Subject: [PATCH 07/10] chore(deps): update dependency pytest to v8.3.2 (#1167) --- samples/samples/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/samples/requirements-test.txt b/samples/samples/requirements-test.txt index ba323d2852..afed94c76a 100644 --- a/samples/samples/requirements-test.txt +++ b/samples/samples/requirements-test.txt @@ -1,4 +1,4 @@ -pytest==8.2.2 +pytest==8.3.2 pytest-dependency==0.6.0 mock==5.1.0 google-cloud-testutils==1.4.0 From cb74679a05960293dd03eb6b74bff0f68a46395c Mon Sep 17 00:00:00 2001 From: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> Date: Mon, 29 Jul 2024 20:31:35 +0530 Subject: [PATCH 08/10] fix(spanner): unskip emulator tests for proto (#1145) --- tests/system/test_database_api.py | 1 - tests/system/test_session_api.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/system/test_database_api.py b/tests/system/test_database_api.py index 244fccd069..c8b3c543fc 100644 --- a/tests/system/test_database_api.py +++ b/tests/system/test_database_api.py @@ -897,7 +897,6 @@ def _unit_of_work(transaction, test): def test_create_table_with_proto_columns( - not_emulator, not_postgres, shared_instance, databases_to_delete, diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index bbe6000aba..00fdf828da 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -2649,7 +2649,7 @@ def test_execute_sql_w_query_param_struct(sessions_database, not_postgres): def test_execute_sql_w_proto_message_bindings( - not_emulator, not_postgres, sessions_database, database_dialect + not_postgres, sessions_database, database_dialect ): singer_info = _sample_data.SINGER_INFO_1 singer_info_bytes = base64.b64encode(singer_info.SerializeToString()) From 74da4a7c8f5ff10b33c95a18b3702840827a4b56 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 30 Jul 2024 12:11:34 +0530 Subject: [PATCH 09/10] chore: Update gapic-generator-python to v1.18.4 (#1174) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/database_admin/async_client.py | 12 ++ .../services/database_admin/client.py | 12 ++ .../services/database_admin/pagers.py | 181 ++++++++++++++++-- .../services/instance_admin/async_client.py | 10 + .../services/instance_admin/client.py | 10 + .../services/instance_admin/pagers.py | 153 ++++++++++++++- .../services/spanner/async_client.py | 2 + .../spanner_v1/services/spanner/client.py | 2 + .../spanner_v1/services/spanner/pagers.py | 41 +++- .../test_database_admin.py | 39 +++- .../test_instance_admin.py | 37 +++- tests/unit/gapic/spanner_v1/test_spanner.py | 7 +- 12 files changed, 470 insertions(+), 36 deletions(-) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index 89c9f4e972..083aebcd42 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -416,6 +416,8 @@ async def sample_list_databases(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2412,6 +2414,8 @@ async def sample_list_backups(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2715,6 +2719,8 @@ async def sample_list_database_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2846,6 +2852,8 @@ async def sample_list_backup_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2968,6 +2976,8 @@ async def sample_list_database_roles(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3556,6 +3566,8 @@ async def sample_list_backup_schedules(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py index 00fe12755a..9bdd254fb5 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -949,6 +949,8 @@ def sample_list_databases(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2912,6 +2914,8 @@ def sample_list_backups(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3209,6 +3213,8 @@ def sample_list_database_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3337,6 +3343,8 @@ def sample_list_backup_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3456,6 +3464,8 @@ def sample_list_database_roles(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -4029,6 +4039,8 @@ def 
sample_list_backup_schedules(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py index e5c9f15526..0fffae2ba6 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup_schedule from google.cloud.spanner_admin_database_v1.types import spanner_database_admin @@ -54,6 +67,8 @@ def __init__( request: spanner_database_admin.ListDatabasesRequest, response: spanner_database_admin.ListDatabasesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -65,12 +80,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_database_admin.ListDatabasesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -81,7 +101,12 @@ def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[spanner_database_admin.Database]: @@ -116,6 +141,8 @@ def __init__( request: spanner_database_admin.ListDatabasesRequest, response: spanner_database_admin.ListDatabasesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -127,12 +154,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_database_admin.ListDatabasesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -145,7 +177,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: @@ -184,6 +221,8 @@ def __init__( request: backup.ListBackupsRequest, response: backup.ListBackupsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -195,12 +234,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = backup.ListBackupsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -211,7 +255,12 @@ def pages(self) -> Iterator[backup.ListBackupsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[backup.Backup]: @@ -246,6 +295,8 @@ def __init__( request: backup.ListBackupsRequest, response: backup.ListBackupsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -257,12 +308,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = backup.ListBackupsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -273,7 +329,12 @@ async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[backup.Backup]: @@ -312,6 +373,8 @@ def __init__( request: spanner_database_admin.ListDatabaseOperationsRequest, response: spanner_database_admin.ListDatabaseOperationsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -323,12 +386,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -339,7 +407,12 @@ def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsRespons yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[operations_pb2.Operation]: @@ -376,6 +449,8 @@ def __init__( request: spanner_database_admin.ListDatabaseOperationsRequest, response: spanner_database_admin.ListDatabaseOperationsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -387,12 +462,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -405,7 +485,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: @@ -444,6 +529,8 @@ def __init__( request: backup.ListBackupOperationsRequest, response: backup.ListBackupOperationsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -455,12 +542,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = backup.ListBackupOperationsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -471,7 +563,12 @@ def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[operations_pb2.Operation]: @@ -506,6 +603,8 @@ def __init__( request: backup.ListBackupOperationsRequest, response: backup.ListBackupOperationsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -517,12 +616,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = backup.ListBackupOperationsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -533,7 +637,12 @@ async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: @@ -572,6 +681,8 @@ def __init__( request: spanner_database_admin.ListDatabaseRolesRequest, response: spanner_database_admin.ListDatabaseRolesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -583,12 +694,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_database_admin.ListDatabaseRolesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -599,7 +715,12 @@ def pages(self) -> Iterator[spanner_database_admin.ListDatabaseRolesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[spanner_database_admin.DatabaseRole]: @@ -636,6 +757,8 @@ def __init__( request: spanner_database_admin.ListDatabaseRolesRequest, response: spanner_database_admin.ListDatabaseRolesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -647,12 +770,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = spanner_database_admin.ListDatabaseRolesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -665,7 +793,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[spanner_database_admin.DatabaseRole]: @@ -704,6 +837,8 @@ def __init__( request: backup_schedule.ListBackupSchedulesRequest, response: backup_schedule.ListBackupSchedulesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -715,12 +850,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = backup_schedule.ListBackupSchedulesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -731,7 +871,12 @@ def pages(self) -> Iterator[backup_schedule.ListBackupSchedulesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[backup_schedule.BackupSchedule]: @@ -766,6 +911,8 @@ def __init__( request: backup_schedule.ListBackupSchedulesRequest, response: backup_schedule.ListBackupSchedulesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -777,12 +924,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = backup_schedule.ListBackupSchedulesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -793,7 +945,12 @@ async def pages(self) -> AsyncIterator[backup_schedule.ListBackupSchedulesRespon yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[backup_schedule.BackupSchedule]: diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index 7bae63ff52..4b823c48ce 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -412,6 +412,8 @@ async def sample_list_instance_configs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1153,6 +1155,8 @@ async def sample_list_instance_config_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1275,6 +1279,8 @@ async def sample_list_instances(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1399,6 +1405,8 @@ async def sample_list_instance_partitions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3148,6 +3156,8 @@ async def sample_list_instance_partition_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py index cb3664e0d2..d90d1707cd 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -855,6 +855,8 @@ def sample_list_instance_configs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1583,6 +1585,8 @@ def sample_list_instance_config_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1702,6 +1706,8 @@ def sample_list_instances(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1823,6 +1829,8 @@ def sample_list_instance_partitions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3556,6 +3564,8 @@ def sample_list_instance_partition_operations(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py index d0cd7eec47..89973615b0 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py @@ -13,6 +13,9 @@ # See the License 
for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin from google.longrunning import operations_pb2 # type: ignore @@ -52,6 +65,8 @@ def __init__( request: spanner_instance_admin.ListInstanceConfigsRequest, response: spanner_instance_admin.ListInstanceConfigsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -63,12 +78,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -79,7 +99,12 @@ def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: @@ -116,6 +141,8 @@ def __init__( request: spanner_instance_admin.ListInstanceConfigsRequest, response: spanner_instance_admin.ListInstanceConfigsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -127,12 +154,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -145,7 +177,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: @@ -186,6 +223,8 @@ def __init__( request: spanner_instance_admin.ListInstanceConfigOperationsRequest, response: spanner_instance_admin.ListInstanceConfigOperationsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -197,6 +236,9 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ @@ -205,6 +247,8 @@ def __init__( request ) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -217,7 +261,12 @@ def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[operations_pb2.Operation]: @@ -254,6 +303,8 @@ def __init__( request: spanner_instance_admin.ListInstanceConfigOperationsRequest, response: spanner_instance_admin.ListInstanceConfigOperationsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -265,6 +316,9 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" @@ -273,6 +327,8 @@ def __init__( request ) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -285,7 +341,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: @@ -324,6 +385,8 @@ def __init__( request: spanner_instance_admin.ListInstancesRequest, response: spanner_instance_admin.ListInstancesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -335,12 +398,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_instance_admin.ListInstancesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -351,7 +419,12 @@ def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: @@ -386,6 +459,8 @@ def __init__( request: spanner_instance_admin.ListInstancesRequest, response: spanner_instance_admin.ListInstancesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -397,12 +472,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = spanner_instance_admin.ListInstancesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -415,7 +495,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: @@ -454,6 +539,8 @@ def __init__( request: spanner_instance_admin.ListInstancePartitionsRequest, response: spanner_instance_admin.ListInstancePartitionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -465,12 +552,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -481,7 +573,12 @@ def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionsRespons yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[spanner_instance_admin.InstancePartition]: @@ -518,6 +615,8 @@ def __init__( request: spanner_instance_admin.ListInstancePartitionsRequest, response: spanner_instance_admin.ListInstancePartitionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -529,12 +628,17 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -547,7 +651,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstancePartition]: @@ -588,6 +697,8 @@ def __init__( request: spanner_instance_admin.ListInstancePartitionOperationsRequest, response: spanner_instance_admin.ListInstancePartitionOperationsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -599,6 +710,9 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ @@ -607,6 +721,8 @@ def __init__( request ) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -619,7 +735,12 @@ def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[operations_pb2.Operation]: @@ -657,6 +778,8 @@ def __init__( request: spanner_instance_admin.ListInstancePartitionOperationsRequest, response: spanner_instance_admin.ListInstancePartitionOperationsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -668,6 +791,9 @@ def __init__( The initial request object. response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" @@ -676,6 +802,8 @@ def __init__( request ) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -688,7 +816,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index cb1981d8b2..e1c6271710 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -731,6 +731,8 @@ async def sample_list_sessions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index 15a9eb45d6..7a07fe86c1 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -1162,6 +1162,8 @@ def sample_list_sessions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/spanner_v1/services/spanner/pagers.py b/google/cloud/spanner_v1/services/spanner/pagers.py index 506de51067..54b517f463 100644 --- a/google/cloud/spanner_v1/services/spanner/pagers.py +++ b/google/cloud/spanner_v1/services/spanner/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.spanner_v1.types import spanner @@ -51,6 +64,8 @@ def __init__( request: spanner.ListSessionsRequest, response: spanner.ListSessionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.cloud.spanner_v1.types.ListSessionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = spanner.ListSessionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[spanner.ListSessionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[spanner.Session]: @@ -113,6 +138,8 @@ def __init__( request: spanner.ListSessionsRequest, response: spanner.ListSessionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.cloud.spanner_v1.types.ListSessionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = spanner.ListSessionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[spanner.Session]: diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index c9b63a9109..ce196a15f8 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -47,6 +47,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.spanner_admin_database_v1.services.database_admin import ( @@ -1550,12 +1551,16 @@ def test_list_databases_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_databases(request={}) + pager = client.list_databases(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -7478,12 +7483,16 @@ def test_list_backups_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = 
tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_backups(request={}) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -8449,12 +8458,18 @@ def test_list_database_operations_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_database_operations(request={}) + pager = client.list_database_operations( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -9038,12 +9053,16 @@ def test_list_backup_operations_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_backup_operations(request={}) + pager = client.list_backup_operations(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -9625,12 +9644,16 @@ def test_list_database_roles_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_database_roles(request={}) + pager = client.list_database_roles(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -11778,12 +11801,16 @@ def test_list_backup_schedules_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_backup_schedules(request={}) + pager = client.list_backup_schedules(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index 138f02f9a4..4550c4a585 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -47,6 +47,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.spanner_admin_instance_v1.services.instance_admin import ( @@ -1564,12 +1565,16 @@ def test_list_instance_configs_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( 
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_instance_configs(request={}) + pager = client.list_instance_configs(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3770,12 +3775,18 @@ def test_list_instance_config_operations_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_instance_config_operations(request={}) + pager = client.list_instance_config_operations( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4340,12 +4351,16 @@ def test_list_instances_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_instances(request={}) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4927,12 +4942,18 @@ def test_list_instance_partitions_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_instance_partitions(request={}) + pager = client.list_instance_partitions( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -9844,12 +9865,18 @@ def test_list_instance_partition_operations_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_instance_partition_operations(request={}) + pager = client.list_instance_partition_operations( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index fe59c2387d..70ba97827e 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient @@ -2600,12 +2601,16 @@ def test_list_sessions_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("database", ""),)), ) - pager = 
client.list_sessions(request={}) + pager = client.list_sessions(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 From c939d0f132d187cc7b4b10fd2a5c775cc2af5eaa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 31 Jul 2024 10:59:26 +0530 Subject: [PATCH 10/10] chore(main): release 3.48.0 (#1153) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 16 ++++++++++++++++ .../spanner_admin_database_v1/gapic_version.py | 2 +- .../spanner_admin_instance_v1/gapic_version.py | 2 +- google/cloud/spanner_v1/gapic_version.py | 2 +- ...etadata_google.spanner.admin.database.v1.json | 2 +- ...etadata_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- 8 files changed, 23 insertions(+), 7 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index aab31dc8eb..cc48236467 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.47.0" + ".": "3.48.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index f3b30205f7..89494da26a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## [3.48.0](https://github.com/googleapis/python-spanner/compare/v3.47.0...v3.48.0) (2024-07-30) + + +### Features + +* Add field lock_hint in spanner.proto ([9609ad9](https://github.com/googleapis/python-spanner/commit/9609ad96d062fbd8fa4d622bfe8da119329facc0)) +* Add field order_by in spanner.proto ([9609ad9](https://github.com/googleapis/python-spanner/commit/9609ad96d062fbd8fa4d622bfe8da119329facc0)) +* Add support for Cloud Spanner Scheduled Backups ([9609ad9](https://github.com/googleapis/python-spanner/commit/9609ad96d062fbd8fa4d622bfe8da119329facc0)) +* **spanner:** Add support for txn changstream exclusion ([#1152](https://github.com/googleapis/python-spanner/issues/1152)) ([00ccb7a](https://github.com/googleapis/python-spanner/commit/00ccb7a5c1f246b5099265058a5e9875e6627024)) + + +### Bug Fixes + +* Allow protobuf 5.x ([9609ad9](https://github.com/googleapis/python-spanner/commit/9609ad96d062fbd8fa4d622bfe8da119329facc0)) +* **spanner:** Unskip emulator tests for proto ([#1145](https://github.com/googleapis/python-spanner/issues/1145)) ([cb74679](https://github.com/googleapis/python-spanner/commit/cb74679a05960293dd03eb6b74bff0f68a46395c)) + ## [3.47.0](https://github.com/googleapis/python-spanner/compare/v3.46.0...v3.47.0) (2024-05-22) diff --git a/google/cloud/spanner_admin_database_v1/gapic_version.py b/google/cloud/spanner_admin_database_v1/gapic_version.py index 19ba6fe27e..ebd305d0c8 100644 --- a/google/cloud/spanner_admin_database_v1/gapic_version.py +++ b/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.47.0" # {x-release-please-version} +__version__ = "3.48.0" # {x-release-please-version} diff --git a/google/cloud/spanner_admin_instance_v1/gapic_version.py b/google/cloud/spanner_admin_instance_v1/gapic_version.py index 19ba6fe27e..ebd305d0c8 100644 --- a/google/cloud/spanner_admin_instance_v1/gapic_version.py +++ b/google/cloud/spanner_admin_instance_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.47.0" # {x-release-please-version} +__version__ = "3.48.0" # {x-release-please-version} diff --git a/google/cloud/spanner_v1/gapic_version.py b/google/cloud/spanner_v1/gapic_version.py index 19ba6fe27e..ebd305d0c8 100644 --- a/google/cloud/spanner_v1/gapic_version.py +++ b/google/cloud/spanner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.47.0" # {x-release-please-version} +__version__ = "3.48.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index 86a6b4fa78..1eab73422e 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "0.1.0" + "version": "3.48.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 0811b451cb..1ae7294c61 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "0.1.0" + "version": "3.48.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index 4384d19e2a..70e86962ed 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "0.1.0" + "version": "3.48.0" }, "snippets": [ {
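The test churn at the top of this section replaces a hand-rolled AwaitableMock subclass with a plain mock.AsyncMock(). A minimal, standard-library-only sketch (not part of any patch above) of why the custom class was unnecessary: AsyncMock is awaitable as-is and maintains await_count itself, which is exactly the bookkeeping the deleted __await__ override did by hand.

    import asyncio
    from unittest import mock

    async def main() -> None:
        # Stand-in for a cached wrapped RPC, as in the unit tests above.
        rpc = mock.AsyncMock(return_value=[])
        result = await rpc()  # calling an AsyncMock yields an awaitable
        assert result == []
        # await_count is tracked automatically; the removed AwaitableMock
        # incremented it manually inside __await__.
        assert rpc.await_count == 1

    asyncio.run(main())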
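PATCH 06 moves the BatchTransactionId dataclass from google.cloud.spanner_dbapi.partition_helper into google.cloud.spanner_v1.transaction and re-exports it from the spanner_v1 package. A minimal sketch of the new import path; the field values are placeholders, and since read_timestamp is annotated as Any in the dataclass, the datetime used here is only an illustrative choice.

    import datetime

    # New location after PATCH 06; previously this was
    # `from google.cloud.spanner_dbapi.partition_helper import BatchTransactionId`.
    from google.cloud.spanner_v1 import BatchTransactionId

    batch_txn_id = BatchTransactionId(
        transaction_id="placeholder-transaction-id",
        session_id="placeholder-session-id",
        read_timestamp=datetime.datetime(2024, 7, 22, tzinfo=datetime.timezone.utc),
    )
    print(batch_txn_id)

Keeping the dataclass next to Transaction lets spanner_v1 modules such as database.py import it directly, which is the dependency cleanup the commit message describes.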
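PATCH 09 threads per-call retry and timeout settings through the generated pagers: each pager now stores them and re-applies them on every subsequent page fetch instead of only the initial request. A minimal sketch modeled on the updated unit tests, assuming default application credentials and a reachable instance; the project and instance names are placeholders.

    from google.api_core import retry as retries
    from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient

    client = DatabaseAdminClient()
    pager = client.list_databases(
        request={"parent": "projects/my-project/instances/my-instance"},
        retry=retries.Retry(),  # stored on the pager, reused for every page
        timeout=5,
    )
    for database in pager:
        # Advancing past the first page issues further list_databases calls,
        # each sent with the same retry and timeout as the first request.
        print(database.name)

The same pattern applies to the other pagers touched by the patch (list_backups, list_instance_configs, list_sessions, and so on), in both the sync and async clients.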