From 32f337bb7af9551b208f0a6b8830fd10e80316f3 Mon Sep 17 00:00:00 2001
From: Sanjeev Bhatt
Date: Mon, 5 Aug 2024 10:27:03 +0530
Subject: [PATCH 01/10] chore(spanner): Issue#591: cursor.list_tables() is
 returning views (#1162)

* chore(spanner): Issue#591 - Update dbapi cursor.list_tables() - add arg
  include_views

- cursor.list_tables() returns tables and views by default
- added another parameter, include_views
- returns tables and views if include_views is set to True (the default)
- returns only tables if include_views is set to False
- kept the default value True so the change does not break existing scripts

* Revert "chore(spanner): Issue#591 - Update dbapi cursor.list_tables() - add
  arg include_views"

This reverts commit e898d4ea0a69464d38f8c4d5c461a858558bd41b.

* chore(spanner): Issue#591: cursor.list_tables() is returning views as well

- cursor.list_tables() returns tables and views by default
- added parameter include_views, defaulting to True
- if include_views is False, cursor.list_tables() returns only tables
  (table_type = 'BASE TABLE')

* chore(spanner): Issue#591: cursor.list_tables() is returning views

- fix lint failure

* chore(spanner): Issue#591: cursor.list_tables() is returning views

- fixed unit test

---------

Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com>
---
 google/cloud/spanner_dbapi/_helpers.py  |  8 ++++++++
 google/cloud/spanner_dbapi/cursor.py    |  6 ++++--
 tests/system/test_dbapi.py              | 25 +++++++++++++++++++++----
 tests/unit/spanner_dbapi/test_cursor.py |  2 +-
 4 files changed, 34 insertions(+), 7 deletions(-)

diff --git a/google/cloud/spanner_dbapi/_helpers.py b/google/cloud/spanner_dbapi/_helpers.py
index b27ef1564f..3f88eda4dd 100644
--- a/google/cloud/spanner_dbapi/_helpers.py
+++ b/google/cloud/spanner_dbapi/_helpers.py
@@ -18,6 +18,14 @@
 SQL_LIST_TABLES = """
 SELECT table_name
 FROM information_schema.tables
+WHERE table_catalog = ''
+AND table_schema = @table_schema
+AND table_type = 'BASE TABLE'
+"""
+
+SQL_LIST_TABLES_AND_VIEWS = """
+SELECT table_name
+FROM information_schema.tables
 WHERE table_catalog = ''
 AND table_schema = @table_schema
 """
diff --git a/google/cloud/spanner_dbapi/cursor.py b/google/cloud/spanner_dbapi/cursor.py
index bd2ad974f9..bcbc8aa5a8 100644
--- a/google/cloud/spanner_dbapi/cursor.py
+++ b/google/cloud/spanner_dbapi/cursor.py
@@ -522,14 +522,16 @@ def __iter__(self):
             raise ProgrammingError("no results to return")
         return self._itr
 
-    def list_tables(self, schema_name=""):
+    def list_tables(self, schema_name="", include_views=True):
         """List the tables of the linked Database.
 
         :rtype: list
         :returns: The list of tables within the Database.
""" return self.run_sql_in_snapshot( - sql=_helpers.SQL_LIST_TABLES, + sql=_helpers.SQL_LIST_TABLES_AND_VIEWS + if include_views + else _helpers.SQL_LIST_TABLES, params={"table_schema": schema_name}, param_types={"table_schema": spanner.param_types.STRING}, ) diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py index 67854eeeac..5a77024689 100644 --- a/tests/system/test_dbapi.py +++ b/tests/system/test_dbapi.py @@ -39,15 +39,20 @@ EXECUTE_SQL_METHOD = SPANNER_RPC_PREFIX + "ExecuteSql" EXECUTE_STREAMING_SQL_METHOD = SPANNER_RPC_PREFIX + "ExecuteStreamingSql" -DDL_STATEMENTS = ( - """CREATE TABLE contacts ( +DDL = """CREATE TABLE contacts ( contact_id INT64, first_name STRING(1024), last_name STRING(1024), email STRING(1024) ) - PRIMARY KEY (contact_id)""", -) + PRIMARY KEY (contact_id); + CREATE VIEW contacts_emails + SQL SECURITY INVOKER + AS + SELECT c.email + FROM contacts AS c;""" + +DDL_STATEMENTS = [stmt.strip() for stmt in DDL.split(";") if stmt.strip()] @pytest.fixture(scope="session") @@ -1581,3 +1586,15 @@ def test_dml_returning_delete(self, autocommit): assert self._cursor.fetchone() == (1, "first-name") assert self._cursor.rowcount == 1 self._conn.commit() + + @pytest.mark.parametrize("include_views", [True, False]) + def test_list_tables(self, include_views): + tables = self._cursor.list_tables(include_views=include_views) + table_names = set(table[0] for table in tables) + + assert "contacts" in table_names + + if include_views: + assert "contacts_emails" in table_names + else: # if not include_views: + assert "contacts_emails" not in table_names diff --git a/tests/unit/spanner_dbapi/test_cursor.py b/tests/unit/spanner_dbapi/test_cursor.py index 1fcdb03a96..3836e1f8e5 100644 --- a/tests/unit/spanner_dbapi/test_cursor.py +++ b/tests/unit/spanner_dbapi/test_cursor.py @@ -948,7 +948,7 @@ def test_list_tables(self): ) as mock_run_sql: cursor.list_tables() mock_run_sql.assert_called_once_with( - sql=_helpers.SQL_LIST_TABLES, + sql=_helpers.SQL_LIST_TABLES_AND_VIEWS, params={"table_schema": ""}, param_types={"table_schema": param_types.STRING}, ) From 1a771466e9bc4f8105dba8c1ed5474a2bbb0f2c1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 5 Aug 2024 10:59:29 +0200 Subject: [PATCH 02/10] chore(deps): update dependency google-cloud-spanner to v3.48.0 (#1177) --- samples/samples/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/samples/requirements.txt b/samples/samples/requirements.txt index 3058d80948..516abe7f8b 100644 --- a/samples/samples/requirements.txt +++ b/samples/samples/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-spanner==3.47.0 +google-cloud-spanner==3.48.0 futures==3.4.0; python_version < "3" From 55f83dc5f776d436b30da6056a9cdcad3971ce39 Mon Sep 17 00:00:00 2001 From: Varun Naik Date: Tue, 6 Aug 2024 01:00:48 -0700 Subject: [PATCH 03/10] feat(spanner): add samples for instance partitions (#1168) * feat(spanner): add samples for instance partitions * PR feedback --------- Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> --- samples/samples/snippets.py | 30 ++++++++++++++++++++++++++++++ samples/samples/snippets_test.py | 18 ++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index e7c76685d3..93c8de4148 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -158,6 +158,36 @@ def list_instance_config(): # [END spanner_list_instance_configs] +# [START spanner_create_instance_partition] 
+def create_instance_partition(instance_id, instance_partition_id): + """Creates an instance partition.""" + from google.cloud.spanner_admin_instance_v1.types import \ + spanner_instance_admin + + spanner_client = spanner.Client() + instance_admin_api = spanner_client.instance_admin_api + + config_name = "{}/instanceConfigs/nam3".format(spanner_client.project_name) + + operation = spanner_client.instance_admin_api.create_instance_partition( + parent=instance_admin_api.instance_path(spanner_client.project, instance_id), + instance_partition_id=instance_partition_id, + instance_partition=spanner_instance_admin.InstancePartition( + config=config_name, + display_name="Test instance partition", + node_count=1, + ), + ) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print("Created instance partition {}".format(instance_partition_id)) + + +# [END spanner_create_instance_partition] + + # [START spanner_list_databases] def list_databases(instance_id): """Lists databases and their leader options.""" diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 909305a65a..6657703fd1 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -82,6 +82,12 @@ def lci_instance_id(): return f"lci-instance-{uuid.uuid4().hex[:10]}" +@pytest.fixture(scope="module") +def instance_partition_instance_id(): + """Id for the instance that tests instance partitions.""" + return f"instance-partition-test-{uuid.uuid4().hex[:10]}" + + @pytest.fixture(scope="module") def database_id(): return f"test-db-{uuid.uuid4().hex[:10]}" @@ -188,6 +194,18 @@ def test_create_instance_with_autoscaling_config(capsys, lci_instance_id): retry_429(instance.delete)() +def test_create_instance_partition(capsys, instance_partition_instance_id): + snippets.create_instance(instance_partition_instance_id) + retry_429(snippets.create_instance_partition)( + instance_partition_instance_id, "my-instance-partition" + ) + out, _ = capsys.readouterr() + assert "Created instance partition my-instance-partition" in out + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_partition_instance_id) + retry_429(instance.delete)() + + def test_update_database(capsys, instance_id, sample_database): snippets.update_database(instance_id, sample_database.database_id) out, _ = capsys.readouterr() From b503fc95d8abd47869a24f0e824a227a281282d6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:55:08 +0530 Subject: [PATCH 04/10] feat(spanner): Add resource reference annotation to backup schedules (#1176) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(spanner): Add support for Cloud Spanner Incremental Backups PiperOrigin-RevId: 657612329 Source-Link: https://github.com/googleapis/googleapis/commit/e77b669b90be3edd814ded7f183eed3b863da947 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0f663469f3edcc34c60c1bbe01727cc5eb971c60 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGY2NjM0NjlmM2VkY2MzNGM2MGMxYmJlMDE3MjdjYzVlYjk3MWM2MCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.5 PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): add edition field to the instance proto PiperOrigin-RevId: 662226829 Source-Link: https://github.com/googleapis/googleapis/commit/eb87f475f5f1a5b5ae7de7fbdc9bc822ca1e87b4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0fb784e8267f0931d24f152ec5f66e809c2a2efb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGZiNzg0ZTgyNjdmMDkzMWQyNGYxNTJlYzVmNjZlODA5YzJhMmVmYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add resource reference annotation to backup schedules docs(spanner): Add an example to filter backups based on schedule name PiperOrigin-RevId: 662402292 Source-Link: https://github.com/googleapis/googleapis/commit/96facece981f227c5d54133845fc519f73900b8e Source-Link: https://github.com/googleapis/googleapis-gen/commit/fe33f1c61415aef4e70f491dfb8789a68e8d9083 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZmUzM2YxYzYxNDE1YWVmNGU3MGY0OTFkZmI4Nzg5YTY4ZThkOTA4MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> --- .../spanner_admin_database_v1/__init__.py | 2 + .../services/database_admin/async_client.py | 5 +- .../services/database_admin/client.py | 2 +- .../types/__init__.py | 2 + .../spanner_admin_database_v1/types/backup.py | 74 ++ .../types/backup_schedule.py | 15 + .../spanner_admin_instance_v1/__init__.py | 6 + .../gapic_metadata.json | 15 + .../services/instance_admin/async_client.py | 250 ++++- .../services/instance_admin/client.py | 245 ++++- .../instance_admin/transports/base.py | 14 + .../instance_admin/transports/grpc.py | 153 ++- .../instance_admin/transports/grpc_asyncio.py | 159 ++- .../instance_admin/transports/rest.py | 135 +++ .../types/__init__.py | 6 + .../types/spanner_instance_admin.py | 200 +++- .../services/spanner/async_client.py | 5 +- .../spanner_v1/services/spanner/client.py | 2 +- ...data_google.spanner.admin.database.v1.json | 2 +- ...data_google.spanner.admin.instance.v1.json | 155 ++- .../snippet_metadata_google.spanner.v1.json | 2 +- ...ated_instance_admin_move_instance_async.py | 57 ++ ...rated_instance_admin_move_instance_sync.py | 57 ++ ...ixup_spanner_admin_instance_v1_keywords.py | 1 + .../test_database_admin.py | 337 ++++--- .../test_instance_admin.py | 936 ++++++++++++++---- tests/unit/gapic/spanner_v1/test_spanner.py | 144 +-- 27 files changed, 2419 insertions(+), 562 deletions(-) create mode 100644 samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py diff --git a/google/cloud/spanner_admin_database_v1/__init__.py b/google/cloud/spanner_admin_database_v1/__init__.py index 74715d1e44..d81a0e2dcc 100644 --- a/google/cloud/spanner_admin_database_v1/__init__.py +++ b/google/cloud/spanner_admin_database_v1/__init__.py @@ -32,6 +32,7 @@ from .types.backup import DeleteBackupRequest from .types.backup import FullBackupSpec from .types.backup import 
GetBackupRequest +from .types.backup import IncrementalBackupSpec from .types.backup import ListBackupOperationsRequest from .types.backup import ListBackupOperationsResponse from .types.backup import ListBackupsRequest @@ -108,6 +109,7 @@ "GetDatabaseDdlRequest", "GetDatabaseDdlResponse", "GetDatabaseRequest", + "IncrementalBackupSpec", "ListBackupOperationsRequest", "ListBackupOperationsResponse", "ListBackupSchedulesRequest", diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index 083aebcd42..d714d52311 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -230,9 +229,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(DatabaseAdminClient).get_transport_class, type(DatabaseAdminClient) - ) + get_transport_class = DatabaseAdminClient.get_transport_class def __init__( self, diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py index 9bdd254fb5..0a68cb2e44 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -819,7 +819,7 @@ def __init__( transport_init: Union[ Type[DatabaseAdminTransport], Callable[..., DatabaseAdminTransport] ] = ( - type(self).get_transport_class(transport) + DatabaseAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., DatabaseAdminTransport], transport) ) diff --git a/google/cloud/spanner_admin_database_v1/types/__init__.py b/google/cloud/spanner_admin_database_v1/types/__init__.py index 2743a7be51..9a9515e9b2 100644 --- a/google/cloud/spanner_admin_database_v1/types/__init__.py +++ b/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -25,6 +25,7 @@ DeleteBackupRequest, FullBackupSpec, GetBackupRequest, + IncrementalBackupSpec, ListBackupOperationsRequest, ListBackupOperationsResponse, ListBackupsRequest, @@ -88,6 +89,7 @@ "DeleteBackupRequest", "FullBackupSpec", "GetBackupRequest", + "IncrementalBackupSpec", "ListBackupOperationsRequest", "ListBackupOperationsResponse", "ListBackupsRequest", diff --git a/google/cloud/spanner_admin_database_v1/types/backup.py b/google/cloud/spanner_admin_database_v1/types/backup.py index 156f16f114..0c220c3953 100644 --- a/google/cloud/spanner_admin_database_v1/types/backup.py +++ b/google/cloud/spanner_admin_database_v1/types/backup.py @@ -44,6 +44,7 @@ "CreateBackupEncryptionConfig", "CopyBackupEncryptionConfig", "FullBackupSpec", + "IncrementalBackupSpec", }, ) @@ -98,6 +99,30 @@ class Backup(proto.Message): equivalent to the ``create_time``. size_bytes (int): Output only. Size of the backup in bytes. + freeable_size_bytes (int): + Output only. The number of bytes that will be + freed by deleting this backup. This value will + be zero if, for example, this backup is part of + an incremental backup chain and younger backups + in the chain require that we keep its data. For + backups not in an incremental backup chain, this + is always the size of the backup. 
This value may + change if backups on the same chain get created, + deleted or expired. + exclusive_size_bytes (int): + Output only. For a backup in an incremental + backup chain, this is the storage space needed + to keep the data that has changed since the + previous backup. For all other backups, this is + always the size of the backup. This value may + change if backups on the same chain get deleted + or expired. + + This field can be used to calculate the total + storage space used by a set of backups. For + example, the total space used by all backups of + a database can be computed by summing up this + field. state (google.cloud.spanner_admin_database_v1.types.Backup.State): Output only. The current state of the backup. referencing_databases (MutableSequence[str]): @@ -156,6 +181,24 @@ class Backup(proto.Message): If collapsing is not done, then this field captures the single backup schedule URI associated with creating this backup. + incremental_backup_chain_id (str): + Output only. Populated only for backups in an incremental + backup chain. Backups share the same chain id if and only if + they belong to the same incremental backup chain. Use this + field to determine which backups are part of the same + incremental backup chain. The ordering of backups in the + chain can be determined by ordering the backup + ``version_time``. + oldest_version_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Data deleted at a time older + than this is guaranteed not to be retained in + order to support this backup. For a backup in an + incremental backup chain, this is the version + time of the oldest backup that exists or ever + existed in the chain. For all other backups, + this is the version time of the backup. This + field can be used to understand what data is + being retained by the backup system. """ class State(proto.Enum): @@ -201,6 +244,14 @@ class State(proto.Enum): proto.INT64, number=5, ) + freeable_size_bytes: int = proto.Field( + proto.INT64, + number=15, + ) + exclusive_size_bytes: int = proto.Field( + proto.INT64, + number=16, + ) state: State = proto.Field( proto.ENUM, number=6, @@ -240,6 +291,15 @@ class State(proto.Enum): proto.STRING, number=14, ) + incremental_backup_chain_id: str = proto.Field( + proto.STRING, + number=17, + ) + oldest_version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) class CreateBackupRequest(proto.Message): @@ -553,6 +613,7 @@ class ListBackupsRequest(proto.Message): - ``version_time`` (and values are of the format YYYY-MM-DDTHH:MM:SSZ) - ``size_bytes`` + - ``backup_schedules`` You can combine multiple expressions by enclosing each expression in parentheses. By default, expressions are @@ -576,6 +637,8 @@ class ListBackupsRequest(proto.Message): ``expire_time`` is before 2018-03-28T14:50:00Z. - ``size_bytes > 10000000000`` - The backup's size is greater than 10GB + - ``backup_schedules:daily`` - The backup is created from a + schedule with "daily" in its name. page_size (int): Number of backups to be returned in the response. If 0 or less, defaults to the server's @@ -999,4 +1062,15 @@ class FullBackupSpec(proto.Message): """ +class IncrementalBackupSpec(proto.Message): + r"""The specification for incremental backup chains. + An incremental backup stores the delta of changes between a + previous backup and the database contents at a given version + time. An incremental backup chain consists of a full backup and + zero or more successive incremental backups. 
The first backup + created for an incremental backup chain is always a full backup. + + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/google/cloud/spanner_admin_database_v1/types/backup_schedule.py index 14ea180bc3..ad9a7ddaf2 100644 --- a/google/cloud/spanner_admin_database_v1/types/backup_schedule.py +++ b/google/cloud/spanner_admin_database_v1/types/backup_schedule.py @@ -66,6 +66,10 @@ class BackupSchedule(proto.Message): specification for a Spanner database. Next ID: 10 + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -96,6 +100,11 @@ class BackupSchedule(proto.Message): full_backup_spec (google.cloud.spanner_admin_database_v1.types.FullBackupSpec): The schedule creates only full backups. + This field is a member of `oneof`_ ``backup_type_spec``. + incremental_backup_spec (google.cloud.spanner_admin_database_v1.types.IncrementalBackupSpec): + The schedule creates incremental backup + chains. + This field is a member of `oneof`_ ``backup_type_spec``. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp at which the @@ -129,6 +138,12 @@ class BackupSchedule(proto.Message): oneof="backup_type_spec", message=backup.FullBackupSpec, ) + incremental_backup_spec: backup.IncrementalBackupSpec = proto.Field( + proto.MESSAGE, + number=8, + oneof="backup_type_spec", + message=backup.IncrementalBackupSpec, + ) update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=9, diff --git a/google/cloud/spanner_admin_instance_v1/__init__.py b/google/cloud/spanner_admin_instance_v1/__init__.py index bf71662118..5d0cad98e8 100644 --- a/google/cloud/spanner_admin_instance_v1/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/__init__.py @@ -49,6 +49,9 @@ from .types.spanner_instance_admin import ListInstancePartitionsResponse from .types.spanner_instance_admin import ListInstancesRequest from .types.spanner_instance_admin import ListInstancesResponse +from .types.spanner_instance_admin import MoveInstanceMetadata +from .types.spanner_instance_admin import MoveInstanceRequest +from .types.spanner_instance_admin import MoveInstanceResponse from .types.spanner_instance_admin import ReplicaInfo from .types.spanner_instance_admin import UpdateInstanceConfigMetadata from .types.spanner_instance_admin import UpdateInstanceConfigRequest @@ -87,6 +90,9 @@ "ListInstancePartitionsResponse", "ListInstancesRequest", "ListInstancesResponse", + "MoveInstanceMetadata", + "MoveInstanceRequest", + "MoveInstanceResponse", "OperationProgress", "ReplicaInfo", "UpdateInstanceConfigMetadata", diff --git a/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/google/cloud/spanner_admin_instance_v1/gapic_metadata.json index 361a5807c8..60fa46718a 100644 --- a/google/cloud/spanner_admin_instance_v1/gapic_metadata.json +++ b/google/cloud/spanner_admin_instance_v1/gapic_metadata.json @@ -85,6 +85,11 @@ "list_instances" ] }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" @@ -190,6 +195,11 @@ "list_instances" ] }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" @@ -295,6 +305,11 @@ "list_instances" ] }, 
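Taken together, the backup.py and backup_schedule.py changes above mean a
backup schedule can now carry an incremental_backup_spec in the oneof that
previously held only full_backup_spec. A hedged sketch of how a schedule
producing an incremental backup chain might be created once this patch is
released; the resource names and cron text are placeholders, and the calls are
the standard generated DatabaseAdmin surface:

    from google.cloud.spanner_admin_database_v1 import (
        BackupSchedule,
        BackupScheduleSpec,
        CrontabSpec,
        DatabaseAdminClient,
        IncrementalBackupSpec,
    )
    from google.protobuf import duration_pb2

    client = DatabaseAdminClient()

    schedule = BackupSchedule(
        # Run at 02:00 every day; CrontabSpec.text uses standard cron syntax.
        spec=BackupScheduleSpec(cron_spec=CrontabSpec(text="0 2 * * *")),
        retention_duration=duration_pb2.Duration(seconds=7 * 24 * 3600),
        # Choosing incremental_backup_spec instead of full_backup_spec makes
        # the scheduled backups form an incremental chain (a full backup
        # first, then deltas), per the IncrementalBackupSpec docstring above.
        incremental_backup_spec=IncrementalBackupSpec(),
    )

    client.create_backup_schedule(
        parent="projects/my-project/instances/my-instance/databases/my-db",
        backup_schedule_id="incremental-daily",
        backup_schedule=schedule,
    )

The generated instance-admin diff, which registers the new MoveInstance RPC on
each transport, continues below.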
+ "MoveInstance": { + "methods": [ + "move_instance" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index 4b823c48ce..045e5c377a 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -225,9 +224,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(InstanceAdminClient).get_transport_class, type(InstanceAdminClient) - ) + get_transport_class = InstanceAdminClient.get_transport_class def __init__( self, @@ -545,39 +542,39 @@ async def create_instance_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an instance config and begins preparing it to be used. - The returned [long-running + r"""Creates an instance configuration and begins preparing it to be + used. The returned [long-running operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance config. The instance - config name is assigned by the caller. If the named instance - config already exists, ``CreateInstanceConfig`` returns - ``ALREADY_EXISTS``. + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. Immediately after the request returns: - - The instance config is readable via the API, with all - requested attributes. The instance config's + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. Its state is ``CREATING``. While the operation is pending: - - Cancelling the operation renders the instance config + - Cancelling the operation renders the instance configuration immediately unreadable via the API. - Except for deleting the creating resource, all other attempts - to modify the instance config are rejected. + to modify the instance configuration are rejected. Upon completion of the returned operation: - Instances can be created using the instance configuration. - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. Its state becomes ``READY``. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track creation of the instance config. The + can be used to track creation of the instance configuration. The [metadata][google.longrunning.Operation.metadata] field type is [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type @@ -626,7 +623,7 @@ async def sample_create_instance_config(): [CreateInstanceConfigRequest][InstanceAdmin.CreateInstanceConfigRequest]. parent (:class:`str`): Required. 
The name of the project in which to create the - instance config. Values are of the form + instance configuration. Values are of the form ``projects/``. This corresponds to the ``parent`` field @@ -644,11 +641,11 @@ async def sample_create_instance_config(): on the ``request`` instance; if ``request`` is provided, this should not be set. instance_config_id (:class:`str`): - Required. The ID of the instance config to create. Valid - identifiers are of the form + Required. The ID of the instance configuration to + create. Valid identifiers are of the form ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 characters in length. The ``custom-`` prefix is - required to avoid name conflicts with Google managed + required to avoid name conflicts with Google-managed configurations. This corresponds to the ``instance_config_id`` field @@ -739,16 +736,16 @@ async def update_instance_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates an instance config. The returned [long-running + r"""Updates an instance configuration. The returned [long-running operation][google.longrunning.Operation] can be used to track the progress of updating the instance. If the named instance - config does not exist, returns ``NOT_FOUND``. + configuration does not exist, returns ``NOT_FOUND``. - Only user managed configurations can be updated. + Only user-managed configurations can be updated. Immediately after the request returns: - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. @@ -759,25 +756,27 @@ async def update_instance_config( The operation is guaranteed to succeed at undoing all changes, after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance config are + - All other attempts to modify the instance configuration are rejected. - - Reading the instance config via the API continues to give the - pre-request values. + - Reading the instance configuration via the API continues to + give the pre-request values. Upon completion of the returned operation: - Creating instances using the instance configuration uses the new values. - - The instance config's new values are readable via the API. - - The instance config's + - The new values of the instance configuration are readable via + the API. + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track the instance config modification. The - [metadata][google.longrunning.Operation.metadata] field type is + can be used to track the instance configuration modification. + The [metadata][google.longrunning.Operation.metadata] field type + is [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type is @@ -822,9 +821,9 @@ async def sample_update_instance_config(): The request object. The request for [UpdateInstanceConfigRequest][InstanceAdmin.UpdateInstanceConfigRequest]. instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`): - Required. The user instance config to update, which must - always include the instance config name. 
Otherwise, only - fields mentioned in + Required. The user instance configuration to update, + which must always include the instance configuration + name. Otherwise, only fields mentioned in [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] need be included. To prevent conflicts of concurrent updates, @@ -931,11 +930,11 @@ async def delete_instance_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes the instance config. Deletion is only allowed when no - instances are using the configuration. If any instances are - using the config, returns ``FAILED_PRECONDITION``. + r"""Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. - Only user managed configurations can be deleted. + Only user-managed configurations can be deleted. Authorization requires ``spanner.instanceConfigs.delete`` permission on the resource @@ -1036,9 +1035,9 @@ async def list_instance_config_operations( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstanceConfigOperationsAsyncPager: - r"""Lists the user-managed instance config [long-running + r"""Lists the user-managed instance configuration [long-running operations][google.longrunning.Operation] in the given project. - An instance config operation has a name of the form + An instance configuration operation has a name of the form ``projects//instanceConfigs//operations/``. The long-running operation [metadata][google.longrunning.Operation.metadata] field type @@ -1081,8 +1080,9 @@ async def sample_list_instance_config_operations(): The request object. The request for [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. parent (:class:`str`): - Required. The project of the instance config operations. - Values are of the form ``projects/``. + Required. The project of the instance configuration + operations. Values are of the form + ``projects/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3164,6 +3164,172 @@ async def sample_list_instance_partition_operations(): # Done; return the response. return response + async def move_instance( + self, + request: Optional[ + Union[spanner_instance_admin.MoveInstanceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Moves an instance to the target instance configuration. You can + use the returned [long-running + operation][google.longrunning.Operation] to track the progress + of moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. 
+ + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are + subject to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned [long-running + operation][google.longrunning.Operation] has a name of the + format ``/operations/`` and can be + used to track the move instance operation. The + [metadata][google.longrunning.Operation.metadata] field type is + [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. + Cancellation is not immediate because it involves moving any + data previously moved to the target instance configuration back + to the original instance configuration. You can use this + operation to track the progress of the cancellation. Upon + successful completion of the cancellation, the operation + terminates with ``CANCELLED`` status. + + If not cancelled, upon completion of the returned operation: + + - The instance successfully moves to the target instance + configuration. + - You are billed for compute and storage in target instance + configuration. + + Authorization requires the ``spanner.instances.update`` + permission on the resource + [instance][google.spanner.admin.instance.v1.Instance]. + + For more details, see `Move an + instance `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_move_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) + + # Make the request + operation = client.move_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]]): + The request object. The request for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.MoveInstanceRequest): + request = spanner_instance_admin.MoveInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.move_instance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.MoveInstanceResponse, + metadata_type=spanner_instance_admin.MoveInstanceMetadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "InstanceAdminAsyncClient": return self diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py index d90d1707cd..6d767f7383 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -724,7 +724,7 @@ def __init__( transport_init: Union[ Type[InstanceAdminTransport], Callable[..., InstanceAdminTransport] ] = ( - type(self).get_transport_class(transport) + InstanceAdminClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., InstanceAdminTransport], transport) ) @@ -985,39 +985,39 @@ def create_instance_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an instance config and begins preparing it to be used. - The returned [long-running + r"""Creates an instance configuration and begins preparing it to be + used. The returned [long-running operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance config. The instance - config name is assigned by the caller. If the named instance - config already exists, ``CreateInstanceConfig`` returns - ``ALREADY_EXISTS``. + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. Immediately after the request returns: - - The instance config is readable via the API, with all - requested attributes. The instance config's + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. Its state is ``CREATING``. 
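As a concrete illustration of the create-instance-configuration lifecycle this
docstring describes, here is a sketch of creating a custom configuration with
the sync client and blocking on the returned long-running operation. The
project, base configuration, and replica choices are placeholders, not
prescribed values:

    from google.cloud.spanner_admin_instance_v1 import (
        InstanceAdminClient,
        InstanceConfig,
    )

    client = InstanceAdminClient()

    # Custom configurations extend a Google-managed base configuration by
    # promoting some of its optional read-only replicas.
    base = client.get_instance_config(
        name="projects/my-project/instanceConfigs/nam11"
    )

    config = InstanceConfig(
        name="projects/my-project/instanceConfigs/custom-nam11-extra",
        display_name="nam11 plus one extra read-only replica",
        base_config=base.name,
        replicas=list(base.replicas) + list(base.optional_replicas[:1]),
    )

    operation = client.create_instance_config(
        parent="projects/my-project",
        instance_config=config,
        instance_config_id="custom-nam11-extra",  # must start with "custom-"
    )

    # While this blocks, the configuration is visible with reconciling=True
    # and state=CREATING; on success it transitions to READY.
    result = operation.result(timeout=300)

The generated docstring resumes below with the pending-operation semantics.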
While the operation is pending: - - Cancelling the operation renders the instance config + - Cancelling the operation renders the instance configuration immediately unreadable via the API. - Except for deleting the creating resource, all other attempts - to modify the instance config are rejected. + to modify the instance configuration are rejected. Upon completion of the returned operation: - Instances can be created using the instance configuration. - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. Its state becomes ``READY``. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track creation of the instance config. The + can be used to track creation of the instance configuration. The [metadata][google.longrunning.Operation.metadata] field type is [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type @@ -1066,7 +1066,7 @@ def sample_create_instance_config(): [CreateInstanceConfigRequest][InstanceAdmin.CreateInstanceConfigRequest]. parent (str): Required. The name of the project in which to create the - instance config. Values are of the form + instance configuration. Values are of the form ``projects/``. This corresponds to the ``parent`` field @@ -1084,11 +1084,11 @@ def sample_create_instance_config(): on the ``request`` instance; if ``request`` is provided, this should not be set. instance_config_id (str): - Required. The ID of the instance config to create. Valid - identifiers are of the form + Required. The ID of the instance configuration to + create. Valid identifiers are of the form ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 characters in length. The ``custom-`` prefix is - required to avoid name conflicts with Google managed + required to avoid name conflicts with Google-managed configurations. This corresponds to the ``instance_config_id`` field @@ -1176,16 +1176,16 @@ def update_instance_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates an instance config. The returned [long-running + r"""Updates an instance configuration. The returned [long-running operation][google.longrunning.Operation] can be used to track the progress of updating the instance. If the named instance - config does not exist, returns ``NOT_FOUND``. + configuration does not exist, returns ``NOT_FOUND``. - Only user managed configurations can be updated. + Only user-managed configurations can be updated. Immediately after the request returns: - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. @@ -1196,25 +1196,27 @@ def update_instance_config( The operation is guaranteed to succeed at undoing all changes, after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance config are + - All other attempts to modify the instance configuration are rejected. - - Reading the instance config via the API continues to give the - pre-request values. + - Reading the instance configuration via the API continues to + give the pre-request values. Upon completion of the returned operation: - Creating instances using the instance configuration uses the new values. 
- - The instance config's new values are readable via the API. - - The instance config's + - The new values of the instance configuration are readable via + the API. + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track the instance config modification. The - [metadata][google.longrunning.Operation.metadata] field type is + can be used to track the instance configuration modification. + The [metadata][google.longrunning.Operation.metadata] field type + is [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type is @@ -1259,9 +1261,9 @@ def sample_update_instance_config(): The request object. The request for [UpdateInstanceConfigRequest][InstanceAdmin.UpdateInstanceConfigRequest]. instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - Required. The user instance config to update, which must - always include the instance config name. Otherwise, only - fields mentioned in + Required. The user instance configuration to update, + which must always include the instance configuration + name. Otherwise, only fields mentioned in [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] need be included. To prevent conflicts of concurrent updates, @@ -1365,11 +1367,11 @@ def delete_instance_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes the instance config. Deletion is only allowed when no - instances are using the configuration. If any instances are - using the config, returns ``FAILED_PRECONDITION``. + r"""Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. - Only user managed configurations can be deleted. + Only user-managed configurations can be deleted. Authorization requires ``spanner.instanceConfigs.delete`` permission on the resource @@ -1467,9 +1469,9 @@ def list_instance_config_operations( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstanceConfigOperationsPager: - r"""Lists the user-managed instance config [long-running + r"""Lists the user-managed instance configuration [long-running operations][google.longrunning.Operation] in the given project. - An instance config operation has a name of the form + An instance configuration operation has a name of the form ``projects//instanceConfigs//operations/``. The long-running operation [metadata][google.longrunning.Operation.metadata] field type @@ -1512,8 +1514,9 @@ def sample_list_instance_config_operations(): The request object. The request for [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. parent (str): - Required. The project of the instance config operations. - Values are of the form ``projects/``. + Required. The project of the instance configuration + operations. Values are of the form + ``projects/``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3572,6 +3575,170 @@ def sample_list_instance_partition_operations(): # Done; return the response. 
return response + def move_instance( + self, + request: Optional[ + Union[spanner_instance_admin.MoveInstanceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Moves an instance to the target instance configuration. You can + use the returned [long-running + operation][google.longrunning.Operation] to track the progress + of moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. + + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are + subject to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned [long-running + operation][google.longrunning.Operation] has a name of the + format ``/operations/`` and can be + used to track the move instance operation. The + [metadata][google.longrunning.Operation.metadata] field type is + [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. + Cancellation is not immediate because it involves moving any + data previously moved to the target instance configuration back + to the original instance configuration. You can use this + operation to track the progress of the cancellation. Upon + successful completion of the cancellation, the operation + terminates with ``CANCELLED`` status. + + If not cancelled, upon completion of the returned operation: + + - The instance successfully moves to the target instance + configuration. + - You are billed for compute and storage in target instance + configuration. + + Authorization requires the ``spanner.instances.update`` + permission on the resource + [instance][google.spanner.admin.instance.v1.Instance]. + + For more details, see `Move an + instance `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_move_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) + + # Make the request + operation = client.move_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]): + The request object. The request for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.MoveInstanceRequest): + request = spanner_instance_admin.MoveInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.MoveInstanceResponse, + metadata_type=spanner_instance_admin.MoveInstanceMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "InstanceAdminClient": return self diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py index ee70ea889a..5f7711559c 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -297,6 +297,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.move_instance: gapic_v1.method.wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -519,6 +524,15 @@ def list_instance_partition_operations( ]: raise NotImplementedError() + @property + def move_instance( + self, + ) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index 347688dedb..f4c1e97f09 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -345,39 +345,39 @@ def create_instance_config( ]: r"""Return a callable for the create instance config method over gRPC. - Creates an instance config and begins preparing it to be used. - The returned [long-running + Creates an instance configuration and begins preparing it to be + used. The returned [long-running operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance config. The instance - config name is assigned by the caller. If the named instance - config already exists, ``CreateInstanceConfig`` returns - ``ALREADY_EXISTS``. + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. Immediately after the request returns: - - The instance config is readable via the API, with all - requested attributes. The instance config's + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. Its state is ``CREATING``. While the operation is pending: - - Cancelling the operation renders the instance config + - Cancelling the operation renders the instance configuration immediately unreadable via the API. - Except for deleting the creating resource, all other attempts - to modify the instance config are rejected. + to modify the instance configuration are rejected. Upon completion of the returned operation: - Instances can be created using the instance configuration. - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. Its state becomes ``READY``. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track creation of the instance config. 
The + can be used to track creation of the instance configuration. The [metadata][google.longrunning.Operation.metadata] field type is [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type @@ -415,16 +415,16 @@ def update_instance_config( ]: r"""Return a callable for the update instance config method over gRPC. - Updates an instance config. The returned [long-running + Updates an instance configuration. The returned [long-running operation][google.longrunning.Operation] can be used to track the progress of updating the instance. If the named instance - config does not exist, returns ``NOT_FOUND``. + configuration does not exist, returns ``NOT_FOUND``. - Only user managed configurations can be updated. + Only user-managed configurations can be updated. Immediately after the request returns: - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. @@ -435,25 +435,27 @@ def update_instance_config( The operation is guaranteed to succeed at undoing all changes, after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance config are + - All other attempts to modify the instance configuration are rejected. - - Reading the instance config via the API continues to give the - pre-request values. + - Reading the instance configuration via the API continues to + give the pre-request values. Upon completion of the returned operation: - Creating instances using the instance configuration uses the new values. - - The instance config's new values are readable via the API. - - The instance config's + - The new values of the instance configuration are readable via + the API. + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track the instance config modification. The - [metadata][google.longrunning.Operation.metadata] field type is + can be used to track the instance configuration modification. + The [metadata][google.longrunning.Operation.metadata] field type + is [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type is @@ -490,11 +492,11 @@ def delete_instance_config( ]: r"""Return a callable for the delete instance config method over gRPC. - Deletes the instance config. Deletion is only allowed when no - instances are using the configuration. If any instances are - using the config, returns ``FAILED_PRECONDITION``. + Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. - Only user managed configurations can be deleted. + Only user-managed configurations can be deleted. Authorization requires ``spanner.instanceConfigs.delete`` permission on the resource @@ -528,9 +530,9 @@ def list_instance_config_operations( r"""Return a callable for the list instance config operations method over gRPC. - Lists the user-managed instance config [long-running + Lists the user-managed instance configuration [long-running operations][google.longrunning.Operation] in the given project. 
- An instance config operation has a name of the form + An instance configuration operation has a name of the form ``projects//instanceConfigs//operations/``. The long-running operation [metadata][google.longrunning.Operation.metadata] field type @@ -1174,6 +1176,99 @@ def list_instance_partition_operations( ) return self._stubs["list_instance_partition_operations"] + @property + def move_instance( + self, + ) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the move instance method over gRPC. + + Moves an instance to the target instance configuration. You can + use the returned [long-running + operation][google.longrunning.Operation] to track the progress + of moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. + + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are + subject to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned [long-running + operation][google.longrunning.Operation] has a name of the + format ``/operations/`` and can be + used to track the move instance operation. The + [metadata][google.longrunning.Operation.metadata] field type is + [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. + Cancellation is not immediate because it involves moving any + data previously moved to the target instance configuration back + to the original instance configuration. You can use this + operation to track the progress of the cancellation. Upon + successful completion of the cancellation, the operation + terminates with ``CANCELLED`` status. + + If not cancelled, upon completion of the returned operation: + + - The instance successfully moves to the target instance + configuration. + - You are billed for compute and storage in target instance + configuration. + + Authorization requires the ``spanner.instances.update`` + permission on the resource + [instance][google.spanner.admin.instance.v1.Instance]. + + For more details, see `Move an + instance `__. + + Returns: + Callable[[~.MoveInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
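+        # The callable is memoized in self._stubs, so repeated accesses of
+        # this property reuse a single unary-unary stub on the channel.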
+ if "move_instance" not in self._stubs: + self._stubs["move_instance"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance", + request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["move_instance"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index b21d57f4fa..ef480a6805 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -352,39 +352,39 @@ def create_instance_config( ]: r"""Return a callable for the create instance config method over gRPC. - Creates an instance config and begins preparing it to be used. - The returned [long-running + Creates an instance configuration and begins preparing it to be + used. The returned [long-running operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance config. The instance - config name is assigned by the caller. If the named instance - config already exists, ``CreateInstanceConfig`` returns - ``ALREADY_EXISTS``. + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. Immediately after the request returns: - - The instance config is readable via the API, with all - requested attributes. The instance config's + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. Its state is ``CREATING``. While the operation is pending: - - Cancelling the operation renders the instance config + - Cancelling the operation renders the instance configuration immediately unreadable via the API. - Except for deleting the creating resource, all other attempts - to modify the instance config are rejected. + to modify the instance configuration are rejected. Upon completion of the returned operation: - Instances can be created using the instance configuration. - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. Its state becomes ``READY``. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track creation of the instance config. The + can be used to track creation of the instance configuration. The [metadata][google.longrunning.Operation.metadata] field type is [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type @@ -423,16 +423,16 @@ def update_instance_config( ]: r"""Return a callable for the update instance config method over gRPC. - Updates an instance config. The returned [long-running + Updates an instance configuration. The returned [long-running operation][google.longrunning.Operation] can be used to track the progress of updating the instance. If the named instance - config does not exist, returns ``NOT_FOUND``. 
+ configuration does not exist, returns ``NOT_FOUND``. - Only user managed configurations can be updated. + Only user-managed configurations can be updated. Immediately after the request returns: - - The instance config's + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field is set to true. @@ -443,25 +443,27 @@ def update_instance_config( The operation is guaranteed to succeed at undoing all changes, after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance config are + - All other attempts to modify the instance configuration are rejected. - - Reading the instance config via the API continues to give the - pre-request values. + - Reading the instance configuration via the API continues to + give the pre-request values. Upon completion of the returned operation: - Creating instances using the instance configuration uses the new values. - - The instance config's new values are readable via the API. - - The instance config's + - The new values of the instance configuration are readable via + the API. + - The instance configuration's [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] field becomes false. The returned [long-running operation][google.longrunning.Operation] will have a name of the format ``/operations/`` and - can be used to track the instance config modification. The - [metadata][google.longrunning.Operation.metadata] field type is + can be used to track the instance configuration modification. + The [metadata][google.longrunning.Operation.metadata] field type + is [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. The [response][google.longrunning.Operation.response] field type is @@ -498,11 +500,11 @@ def delete_instance_config( ]: r"""Return a callable for the delete instance config method over gRPC. - Deletes the instance config. Deletion is only allowed when no - instances are using the configuration. If any instances are - using the config, returns ``FAILED_PRECONDITION``. + Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. - Only user managed configurations can be deleted. + Only user-managed configurations can be deleted. Authorization requires ``spanner.instanceConfigs.delete`` permission on the resource @@ -536,9 +538,9 @@ def list_instance_config_operations( r"""Return a callable for the list instance config operations method over gRPC. - Lists the user-managed instance config [long-running + Lists the user-managed instance configuration [long-running operations][google.longrunning.Operation] in the given project. - An instance config operation has a name of the form + An instance configuration operation has a name of the form ``projects//instanceConfigs//operations/``. The long-running operation [metadata][google.longrunning.Operation.metadata] field type @@ -1188,6 +1190,100 @@ def list_instance_partition_operations( ) return self._stubs["list_instance_partition_operations"] + @property + def move_instance( + self, + ) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the move instance method over gRPC. + + Moves an instance to the target instance configuration. 
You can + use the returned [long-running + operation][google.longrunning.Operation] to track the progress + of moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. + + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are + subject to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned [long-running + operation][google.longrunning.Operation] has a name of the + format ``/operations/`` and can be + used to track the move instance operation. The + [metadata][google.longrunning.Operation.metadata] field type is + [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. + Cancellation is not immediate because it involves moving any + data previously moved to the target instance configuration back + to the original instance configuration. You can use this + operation to track the progress of the cancellation. Upon + successful completion of the cancellation, the operation + terminates with ``CANCELLED`` status. + + If not cancelled, upon completion of the returned operation: + + - The instance successfully moves to the target instance + configuration. + - You are billed for compute and storage in target instance + configuration. + + Authorization requires the ``spanner.instances.update`` + permission on the resource + [instance][google.spanner.admin.instance.v1.Instance]. + + For more details, see `Move an + instance `__. + + Returns: + Callable[[~.MoveInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
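+        # As in the sync transport, the stub is cached in self._stubs. A
+        # minimal usage sketch (assuming a ``transport`` instance of this
+        # class): ``op = await transport.move_instance(request)`` resolves
+        # to a raw operations_pb2.Operation.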
+ if "move_instance" not in self._stubs: + self._stubs["move_instance"] = self.grpc_channel.unary_unary( + "/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance", + request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["move_instance"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1351,6 +1447,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.move_instance: gapic_v1.method_async.wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py index ed152b4220..1a74f0e7f9 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py @@ -182,6 +182,14 @@ def post_list_instances(self, response): logging.log(f"Received response: {response}") return response + def pre_move_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move_instance(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_iam_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -560,6 +568,29 @@ def post_list_instances( """ return response + def pre_move_instance( + self, + request: spanner_instance_admin.MoveInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[spanner_instance_admin.MoveInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_move_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for move_instance + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -2285,6 +2316,100 @@ def __call__( resp = self._interceptor.post_list_instances(resp) return resp + class _MoveInstance(InstanceAdminRestStub): + def __hash__(self): + return hash("MoveInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: spanner_instance_admin.MoveInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the move instance method over HTTP. + + Args: + request (~.spanner_instance_admin.MoveInstanceRequest): + The request object. The request for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/instances/*}:move", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_move_instance(request, metadata) + pb_request = spanner_instance_admin.MoveInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move_instance(resp) + return resp + class _SetIamPolicy(InstanceAdminRestStub): def __hash__(self): return hash("SetIamPolicy") @@ -2988,6 +3113,16 @@ def list_instances( # In C++ this would require a dynamic_cast return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + @property + def move_instance( + self, + ) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._MoveInstance(self._session, self._host, self._interceptor) # type: ignore + @property def set_iam_policy( self, diff --git a/google/cloud/spanner_admin_instance_v1/types/__init__.py b/google/cloud/spanner_admin_instance_v1/types/__init__.py index a3d1028ce9..1b9cd38032 100644 --- a/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/types/__init__.py @@ -44,6 +44,9 @@ ListInstancePartitionsResponse, ListInstancesRequest, ListInstancesResponse, + MoveInstanceMetadata, + MoveInstanceRequest, + MoveInstanceResponse, ReplicaInfo, UpdateInstanceConfigMetadata, UpdateInstanceConfigRequest, @@ -82,6 +85,9 @@ "ListInstancePartitionsResponse", "ListInstancesRequest", "ListInstancesResponse", + "MoveInstanceMetadata", + "MoveInstanceRequest", + "MoveInstanceResponse", "ReplicaInfo", "UpdateInstanceConfigMetadata", "UpdateInstanceConfigRequest", diff --git a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py index 171bf48618..d2bb2d395b 100644 --- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py @@ -61,6 +61,9 @@ "ListInstancePartitionsResponse", "ListInstancePartitionOperationsRequest", "ListInstancePartitionOperationsResponse", + "MoveInstanceRequest", + "MoveInstanceResponse", + "MoveInstanceMetadata", }, ) @@ -147,12 +150,15 @@ class InstanceConfig(proto.Message): A unique identifier for the instance configuration. Values are of the form ``projects//instanceConfigs/[a-z][-a-z0-9]*``. + + User instance configuration must start with ``custom-``. display_name (str): The name of this instance configuration as it appears in UIs. config_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.Type): - Output only. Whether this instance config is - a Google or User Managed Configuration. + Output only. Whether this instance + configuration is a Google-managed or + user-managed configuration. replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]): The geographic placement of nodes in this instance configuration and their replication @@ -201,30 +207,31 @@ class InstanceConfig(proto.Message): etag (str): etag is used for optimistic concurrency control as a way to help prevent simultaneous - updates of a instance config from overwriting - each other. It is strongly suggested that - systems make use of the etag in the + updates of a instance configuration from + overwriting each other. It is strongly suggested + that systems make use of the etag in the read-modify-write cycle to perform instance - config updates in order to avoid race + configuration updates in order to avoid race conditions: An etag is returned in the response - which contains instance configs, and systems are - expected to put that etag in the request to - update instance config to ensure that their - change will be applied to the same version of - the instance config. - If no etag is provided in the call to update - instance config, then the existing instance - config is overwritten blindly. + which contains instance configurations, and + systems are expected to put that etag in the + request to update instance configuration to + ensure that their change is applied to the same + version of the instance configuration. 
If no + etag is provided in the call to update the + instance configuration, then the existing + instance configuration is overwritten blindly. leader_options (MutableSequence[str]): Allowed values of the "default_leader" schema option for databases in instances that use this instance configuration. reconciling (bool): - Output only. If true, the instance config is - being created or updated. If false, there are no - ongoing operations for the instance config. + Output only. If true, the instance + configuration is being created or updated. If + false, there are no ongoing operations for the + instance configuration. state (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.State): - Output only. The current instance config - state. + Output only. The current instance configuration state. + Applicable only for ``USER_MANAGED`` configurations. """ class Type(proto.Enum): @@ -243,16 +250,17 @@ class Type(proto.Enum): USER_MANAGED = 2 class State(proto.Enum): - r"""Indicates the current state of the instance config. + r"""Indicates the current state of the instance configuration. Values: STATE_UNSPECIFIED (0): Not specified. CREATING (1): - The instance config is still being created. + The instance configuration is still being + created. READY (2): - The instance config is fully created and - ready to be used to create instances. + The instance configuration is fully created + and ready to be used to create instances. """ STATE_UNSPECIFIED = 0 CREATING = 1 @@ -310,7 +318,7 @@ class State(proto.Enum): class AutoscalingConfig(proto.Message): - r"""Autoscaling config for an instance. + r"""Autoscaling configuration for an instance. Attributes: autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): @@ -521,6 +529,8 @@ class Instance(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which the instance was most recently updated. + edition (google.cloud.spanner_admin_instance_v1.types.Instance.Edition): + Optional. The ``Edition`` of the current instance. """ class State(proto.Enum): @@ -542,6 +552,25 @@ class State(proto.Enum): CREATING = 1 READY = 2 + class Edition(proto.Enum): + r"""The edition selected for this instance. Different editions + provide different capabilities at different price points. + + Values: + EDITION_UNSPECIFIED (0): + Edition not specified. + STANDARD (1): + Standard edition. + ENTERPRISE (2): + Enterprise edition. + ENTERPRISE_PLUS (3): + Enterprise Plus edition. + """ + EDITION_UNSPECIFIED = 0 + STANDARD = 1 + ENTERPRISE = 2 + ENTERPRISE_PLUS = 3 + name: str = proto.Field( proto.STRING, number=1, @@ -591,6 +620,11 @@ class State(proto.Enum): number=12, message=timestamp_pb2.Timestamp, ) + edition: Edition = proto.Field( + proto.ENUM, + number=20, + enum=Edition, + ) class ListInstanceConfigsRequest(proto.Message): @@ -680,14 +714,14 @@ class CreateInstanceConfigRequest(proto.Message): Attributes: parent (str): Required. The name of the project in which to create the - instance config. Values are of the form + instance configuration. Values are of the form ``projects/``. instance_config_id (str): - Required. The ID of the instance config to create. Valid - identifiers are of the form ``custom-[-a-z0-9]*[a-z0-9]`` - and must be between 2 and 64 characters in length. The - ``custom-`` prefix is required to avoid name conflicts with - Google managed configurations. + Required. The ID of the instance configuration to create. 
+ Valid identifiers are of the form + ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 + characters in length. The ``custom-`` prefix is required to + avoid name conflicts with Google-managed configurations. instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): Required. The InstanceConfig proto of the configuration to create. instance_config.name must be @@ -726,9 +760,9 @@ class UpdateInstanceConfigRequest(proto.Message): Attributes: instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - Required. The user instance config to update, which must - always include the instance config name. Otherwise, only - fields mentioned in + Required. The user instance configuration to update, which + must always include the instance configuration name. + Otherwise, only fields mentioned in [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] need be included. To prevent conflicts of concurrent updates, @@ -776,13 +810,14 @@ class DeleteInstanceConfigRequest(proto.Message): etag (str): Used for optimistic concurrency control as a way to help prevent simultaneous deletes of an - instance config from overwriting each other. If - not empty, the API - only deletes the instance config when the etag - provided matches the current status of the - requested instance config. Otherwise, deletes - the instance config without checking the current - status of the requested instance config. + instance configuration from overwriting each + other. If not empty, the API + only deletes the instance configuration when the + etag provided matches the current status of the + requested instance configuration. Otherwise, + deletes the instance configuration without + checking the current status of the requested + instance configuration. validate_only (bool): An option to validate, but not actually execute, a request, and provide the same @@ -809,8 +844,8 @@ class ListInstanceConfigOperationsRequest(proto.Message): Attributes: parent (str): - Required. The project of the instance config operations. - Values are of the form ``projects/``. + Required. The project of the instance configuration + operations. Values are of the form ``projects/``. filter (str): An expression that filters the list of returned operations. @@ -857,7 +892,8 @@ class ListInstanceConfigOperationsRequest(proto.Message): - The operation's metadata type is [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - - The instance config name contains "custom-config". + - The instance configuration name contains + "custom-config". - The operation started before 2021-03-28T14:50:00Z. - The operation resulted in an error. page_size (int): @@ -896,10 +932,10 @@ class ListInstanceConfigOperationsResponse(proto.Message): Attributes: operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching instance config [long-running + The list of matching instance configuration [long-running operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the instance config's name. The - operation's + name will be prefixed by the name of the instance + configuration. The operation's [metadata][google.longrunning.Operation.metadata] field type ``metadata.type_url`` describes the type of the metadata. 
next_page_token (str): @@ -1247,7 +1283,7 @@ class CreateInstanceConfigMetadata(proto.Message): Attributes: instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - The target instance config end state. + The target instance configuration end state. progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress): The progress of the [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig] @@ -1280,7 +1316,8 @@ class UpdateInstanceConfigMetadata(proto.Message): Attributes: instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - The desired instance config after updating. + The desired instance configuration after + updating. progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress): The progress of the [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig] @@ -1898,4 +1935,71 @@ def raw_page(self): ) +class MoveInstanceRequest(proto.Message): + r"""The request for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + + Attributes: + name (str): + Required. The instance to move. Values are of the form + ``projects//instances/``. + target_config (str): + Required. The target instance configuration where to move + the instance. Values are of the form + ``projects//instanceConfigs/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + target_config: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MoveInstanceResponse(proto.Message): + r"""The response for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + + """ + + +class MoveInstanceMetadata(proto.Message): + r"""Metadata type for the operation returned by + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + + Attributes: + target_config (str): + The target instance configuration where to move the + instance. Values are of the form + ``projects//instanceConfigs/``. + progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress): + The progress of the + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance] + operation. + [progress_percent][google.spanner.admin.instance.v1.OperationProgress.progress_percent] + is reset when cancellation is requested. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. + """ + + target_config: str = proto.Field( + proto.STRING, + number=1, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=2, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index e1c6271710..992a74503c 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -194,9 +193,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SpannerClient).get_transport_class, type(SpannerClient) - ) + get_transport_class = SpannerClient.get_transport_class def __init__( self, diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index 7a07fe86c1..96b90bb21c 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -690,7 +690,7 @@ def __init__( transport_init: Union[ Type[SpannerTransport], Callable[..., SpannerTransport] ] = ( - type(self).get_transport_class(transport) + SpannerClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SpannerTransport], transport) ) diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index 1eab73422e..86a6b4fa78 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.48.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 1ae7294c61..ac2f8c24ec 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.48.0" + "version": "0.1.0" }, "snippets": [ { @@ -2456,6 +2456,159 @@ ], "title": "spanner_v1_generated_instance_admin_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.move_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "spanner_v1_generated_instance_admin_move_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_move_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.move_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "spanner_v1_generated_instance_admin_move_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_move_instance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index 70e86962ed..4384d19e2a 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.48.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py new file mode 100644 index 0000000000..6530706620 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_MoveInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_move_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) + + # Make the request + operation = client.move_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_MoveInstance_async] diff --git a/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py new file mode 100644 index 0000000000..32d1c4f5b1 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_MoveInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
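+# - It may require the spanner.instances.update IAM permission, which
+#   MoveInstance requires on the instance resource being moved.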
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_move_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) + + # Make the request + operation = client.move_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_MoveInstance_sync] diff --git a/scripts/fixup_spanner_admin_instance_v1_keywords.py b/scripts/fixup_spanner_admin_instance_v1_keywords.py index 321014ad94..3b5fa8afb6 100644 --- a/scripts/fixup_spanner_admin_instance_v1_keywords.py +++ b/scripts/fixup_spanner_admin_instance_v1_keywords.py @@ -54,6 +54,7 @@ class spanner_admin_instanceCallTransformer(cst.CSTTransformer): 'list_instance_partition_operations': ('parent', 'filter', 'page_size', 'page_token', 'instance_partition_deadline', ), 'list_instance_partitions': ('parent', 'page_size', 'page_token', 'instance_partition_deadline', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'instance_deadline', ), + 'move_instance': ('name', 'target_config', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_instance': ('instance', 'field_mask', ), diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index ce196a15f8..bdec708615 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -1312,22 +1312,23 @@ async def test_list_databases_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_databases - ] = mock_object + ] = mock_rpc request = {} await client.list_databases(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_databases(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1817,8 +1818,9 @@ def test_create_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_database(request) @@ -1872,26 +1874,28 @@ async def test_create_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_database - ] = mock_object + ] = mock_rpc request = {} await client.create_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2266,22 +2270,23 @@ async def test_get_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_database - ] = mock_object + ] = mock_rpc request = {} await client.get_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2583,8 +2588,9 @@ def test_update_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_database(request) @@ -2638,26 +2644,28 @@ async def test_update_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_database - ] = mock_object + ] = mock_rpc request = {} await client.update_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2970,8 +2978,9 @@ def test_update_database_ddl_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_database_ddl(request) @@ -3027,26 +3036,28 @@ async def test_update_database_ddl_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_database_ddl - ] = mock_object + ] = mock_rpc request = {} await client.update_database_ddl(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_database_ddl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3406,22 +3417,23 @@ async def test_drop_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.drop_database - ] = mock_object + ] = mock_rpc request = {} await client.drop_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.drop_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3767,22 +3779,23 @@ async def test_get_database_ddl_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_database_ddl - ] = mock_object + ] = mock_rpc request = {} await client.get_database_ddl(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_database_ddl(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4137,22 +4150,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4520,22 +4534,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4911,22 +4926,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5261,8 +5277,9 @@ def test_create_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_backup(request) @@ -5316,26 +5333,28 @@ async def test_create_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup - ] = mock_object + ] = mock_rpc request = {} await client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
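+        # The AsyncMock installed above replaced the cached wrapped method,
+        # so the counts below verify that the cached wrapper path is reused.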
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5649,8 +5668,9 @@ def test_copy_backup_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.copy_backup(request) @@ -5704,26 +5724,28 @@ async def test_copy_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.copy_backup - ] = mock_object + ] = mock_rpc request = {} await client.copy_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.copy_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5959,11 +5981,14 @@ def test_get_backup(request_type, transport: str = "grpc"): database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) response = client.get_backup(request) @@ -5978,11 +6003,14 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.database == "database_value" assert response.name == "name_value" assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 assert response.state == backup.Backup.State.CREATING assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] assert response.backup_schedules == ["backup_schedules_value"] + assert response.incremental_backup_chain_id == "incremental_backup_chain_id_value" def test_get_backup_empty_call(): @@ -6084,11 +6112,14 @@ async def test_get_backup_empty_call_async(): database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) ) response = await client.get_backup() @@ -6118,22 +6149,23 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup - ] = mock_object + ] = mock_rpc request = {} await client.get_backup(request) # Establish that the underlying gRPC stub method was called. 
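The get_backup hunks above extend the canned response with the new Backup fields. A hedged sketch of constructing such a message outside the test harness, assuming a google-cloud-spanner version that includes this patch; the values mirror the test fixtures.

from google.cloud.spanner_admin_database_v1.types import backup

canned = backup.Backup(
    database="database_value",
    name="name_value",
    size_bytes=1089,
    freeable_size_bytes=2006,   # new field exercised by this patch
    exclusive_size_bytes=2168,  # new field exercised by this patch
    incremental_backup_chain_id="incremental_backup_chain_id_value",  # new field
)
assert canned.freeable_size_bytes == 2006
assert canned.exclusive_size_bytes == 2168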
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6157,11 +6189,14 @@ async def test_get_backup_async( database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) ) response = await client.get_backup(request) @@ -6177,11 +6212,14 @@ async def test_get_backup_async( assert response.database == "database_value" assert response.name == "name_value" assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 assert response.state == backup.Backup.State.CREATING assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] assert response.backup_schedules == ["backup_schedules_value"] + assert response.incremental_backup_chain_id == "incremental_backup_chain_id_value" @pytest.mark.asyncio @@ -6352,11 +6390,14 @@ def test_update_backup(request_type, transport: str = "grpc"): database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=gsad_backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) response = client.update_backup(request) @@ -6371,11 +6412,14 @@ def test_update_backup(request_type, transport: str = "grpc"): assert response.database == "database_value" assert response.name == "name_value" assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 assert response.state == gsad_backup.Backup.State.CREATING assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] assert response.backup_schedules == ["backup_schedules_value"] + assert response.incremental_backup_chain_id == "incremental_backup_chain_id_value" def test_update_backup_empty_call(): @@ -6473,11 +6517,14 @@ async def test_update_backup_empty_call_async(): database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=gsad_backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) ) response = await client.update_backup() @@ -6509,22 +6556,23 @@ async def test_update_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with 
mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup - ] = mock_object + ] = mock_rpc request = {} await client.update_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6548,11 +6596,14 @@ async def test_update_backup_async( database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=gsad_backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) ) response = await client.update_backup(request) @@ -6568,11 +6619,14 @@ async def test_update_backup_async( assert response.database == "database_value" assert response.name == "name_value" assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 assert response.state == gsad_backup.Backup.State.CREATING assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] assert response.backup_schedules == ["backup_schedules_value"] + assert response.incremental_backup_chain_id == "incremental_backup_chain_id_value" @pytest.mark.asyncio @@ -6886,22 +6940,23 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7245,22 +7300,23 @@ async def test_list_backups_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backups - ] = mock_object + ] = mock_rpc request = {} await client.list_backups(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7753,8 +7809,9 @@ def test_restore_database_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.restore_database(request) @@ -7808,26 +7865,28 @@ async def test_restore_database_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.restore_database - ] = mock_object + ] = mock_rpc request = {} await client.restore_database(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.restore_database(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8207,22 +8266,23 @@ async def test_list_database_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_database_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_database_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_database_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8803,22 +8863,23 @@ async def test_list_backup_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backup_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_backup_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backup_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9393,22 +9454,23 @@ async def test_list_database_roles_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_database_roles - ] = mock_object + ] = mock_rpc request = {} await client.list_database_roles(request) # Establish that the underlying gRPC stub method was called. 
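The reworded comments in the operation-method tests all describe one contract: wrap_method runs once per RPC method (plus once more for the cached operations client on the first LRO call), and later calls reuse the cached wrapper. A toy model of that caching contract, not the transport's real internals:

from unittest import mock

wrap_method = mock.Mock(side_effect=lambda fn: fn)  # stand-in for the gapic wrapping step
_wrapped_methods = {}

def wrapped(fn):
    if fn not in _wrapped_methods:            # wrap only on first use
        _wrapped_methods[fn] = wrap_method(fn)
    return _wrapped_methods[fn]

stub = mock.Mock(name="stub_method")
wrapped(stub)()
wrapped(stub)()

assert wrap_method.call_count == 1  # cached: wrapped exactly once
assert stub.call_count == 2         # but invoked on every call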
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_database_roles(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9987,22 +10049,23 @@ async def test_create_backup_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_backup_schedule - ] = mock_object + ] = mock_rpc request = {} await client.create_backup_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10394,22 +10457,23 @@ async def test_get_backup_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_backup_schedule - ] = mock_object + ] = mock_rpc request = {} await client.get_backup_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10778,22 +10842,23 @@ async def test_update_backup_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_backup_schedule - ] = mock_object + ] = mock_rpc request = {} await client.update_backup_schedule(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11169,22 +11234,23 @@ async def test_delete_backup_schedule_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_backup_schedule - ] = mock_object + ] = mock_rpc request = {} await client.delete_backup_schedule(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_backup_schedule(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -11550,22 +11616,23 @@ async def test_list_backup_schedules_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_backup_schedules - ] = mock_object + ] = mock_rpc request = {} await client.list_backup_schedules(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_backup_schedules(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -15251,6 +15318,8 @@ def test_create_backup_rest(request_type): "name": "name_value", "create_time": {}, "size_bytes": 1089, + "freeable_size_bytes": 2006, + "exclusive_size_bytes": 2168, "state": 1, "referencing_databases": [ "referencing_databases_value1", @@ -15278,6 +15347,8 @@ def test_create_backup_rest(request_type): ], "max_expire_time": {}, "backup_schedules": ["backup_schedules_value1", "backup_schedules_value2"], + "incremental_backup_chain_id": "incremental_backup_chain_id_value", + "oldest_version_time": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -16012,11 +16083,14 @@ def test_get_backup_rest(request_type): database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) # Wrap the value into a proper Response obj @@ -16035,11 +16109,14 @@ def test_get_backup_rest(request_type): assert response.database == "database_value" assert response.name == "name_value" assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 assert response.state == backup.Backup.State.CREATING assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] assert response.backup_schedules == ["backup_schedules_value"] + assert response.incremental_backup_chain_id == "incremental_backup_chain_id_value" def test_get_backup_rest_use_cached_wrapped_rpc(): @@ -16322,6 +16399,8 @@ def test_update_backup_rest(request_type): "name": "projects/sample1/instances/sample2/backups/sample3", "create_time": {}, "size_bytes": 1089, + "freeable_size_bytes": 2006, + "exclusive_size_bytes": 2168, "state": 1, "referencing_databases": [ "referencing_databases_value1", @@ -16349,6 +16428,8 @@ def test_update_backup_rest(request_type): ], "max_expire_time": {}, "backup_schedules": ["backup_schedules_value1", 
"backup_schedules_value2"], + "incremental_backup_chain_id": "incremental_backup_chain_id_value", + "oldest_version_time": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -16426,11 +16507,14 @@ def get_message_fields(field): database="database_value", name="name_value", size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, state=gsad_backup.Backup.State.CREATING, referencing_databases=["referencing_databases_value"], database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, referencing_backups=["referencing_backups_value"], backup_schedules=["backup_schedules_value"], + incremental_backup_chain_id="incremental_backup_chain_id_value", ) # Wrap the value into a proper Response obj @@ -16449,11 +16533,14 @@ def get_message_fields(field): assert response.database == "database_value" assert response.name == "name_value" assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 assert response.state == gsad_backup.Backup.State.CREATING assert response.referencing_databases == ["referencing_databases_value"] assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL assert response.referencing_backups == ["referencing_backups_value"] assert response.backup_schedules == ["backup_schedules_value"] + assert response.incremental_backup_chain_id == "incremental_backup_chain_id_value" def test_update_backup_rest_use_cached_wrapped_rpc(): @@ -18890,6 +18977,7 @@ def test_create_backup_schedule_rest(request_type): "kms_key_names": ["kms_key_names_value1", "kms_key_names_value2"], }, "full_backup_spec": {}, + "incremental_backup_spec": {}, "update_time": {"seconds": 751, "nanos": 543}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -19634,6 +19722,7 @@ def test_update_backup_schedule_rest(request_type): "kms_key_names": ["kms_key_names_value1", "kms_key_names_value2"], }, "full_backup_spec": {}, + "incremental_backup_spec": {}, "update_time": {"seconds": 751, "nanos": 543}, } # The version of a generated dependency at test runtime may differ from the version used during generation. diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index 4550c4a585..e150adcf1c 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -1314,22 +1314,23 @@ async def test_list_instance_configs_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_configs - ] = mock_object + ] = mock_rpc request = {} await client.list_instance_configs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instance_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1931,22 +1932,23 @@ async def test_get_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_config - ] = mock_object + ] = mock_rpc request = {} await client.get_instance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2280,8 +2282,9 @@ def test_create_instance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance_config(request) @@ -2337,26 +2340,28 @@ async def test_create_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance_config - ] = mock_object + ] = mock_rpc request = {} await client.create_instance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2684,8 +2689,9 @@ def test_update_instance_config_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance_config(request) @@ -2741,26 +2747,28 @@ async def test_update_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance_config - ] = mock_object + ] = mock_rpc request = {} await client.update_instance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3135,22 +3143,23 @@ async def test_delete_instance_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance_config - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_instance_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3518,22 +3527,23 @@ async def test_list_instance_config_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_config_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_instance_config_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instance_config_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4110,22 +4120,23 @@ async def test_list_instances_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instances - ] = mock_object + ] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4689,22 +4700,23 @@ async def test_list_instance_partitions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_partitions - ] = mock_object + ] = mock_rpc request = {} await client.list_instance_partitions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instance_partitions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5138,6 +5150,7 @@ def test_get_instance(request_type, transport: str = "grpc"): processing_units=1743, state=spanner_instance_admin.Instance.State.CREATING, endpoint_uris=["endpoint_uris_value"], + edition=spanner_instance_admin.Instance.Edition.STANDARD, ) response = client.get_instance(request) @@ -5156,6 +5169,7 @@ def test_get_instance(request_type, transport: str = "grpc"): assert response.processing_units == 1743 assert response.state == spanner_instance_admin.Instance.State.CREATING assert response.endpoint_uris == ["endpoint_uris_value"] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD def test_get_instance_empty_call(): @@ -5261,6 +5275,7 @@ async def test_get_instance_empty_call_async(): processing_units=1743, state=spanner_instance_admin.Instance.State.CREATING, endpoint_uris=["endpoint_uris_value"], + edition=spanner_instance_admin.Instance.Edition.STANDARD, ) ) response = await client.get_instance() @@ -5292,22 +5307,23 @@ async def test_get_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance - ] = mock_object + ] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5336,6 +5352,7 @@ async def test_get_instance_async( processing_units=1743, state=spanner_instance_admin.Instance.State.CREATING, endpoint_uris=["endpoint_uris_value"], + edition=spanner_instance_admin.Instance.Edition.STANDARD, ) ) response = await client.get_instance(request) @@ -5355,6 +5372,7 @@ async def test_get_instance_async( assert response.processing_units == 1743 assert response.state == spanner_instance_admin.Instance.State.CREATING assert response.endpoint_uris == ["endpoint_uris_value"] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD @pytest.mark.asyncio @@ -5615,8 +5633,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
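The get_instance fixtures above pick up the new edition enum. A hedged sketch of constructing an Instance with it, assuming the spanner_instance_admin types module these tests import:

from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

inst = spanner_instance_admin.Instance(
    name="name_value",
    config="config_value",
    display_name="display_name_value",
    edition=spanner_instance_admin.Instance.Edition.STANDARD,  # new enum field
)
assert inst.edition == spanner_instance_admin.Instance.Edition.STANDARD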
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -5670,26 +5689,28 @@ async def test_create_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance - ] = mock_object + ] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5996,8 +6017,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -6051,26 +6073,28 @@ async def test_update_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance - ] = mock_object + ] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6420,22 +6444,23 @@ async def test_delete_instance_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6779,22 +6804,23 @@ async def test_set_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.set_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7162,22 +7188,23 @@ async def test_get_iam_policy_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_iam_policy - ] = mock_object + ] = mock_rpc request = {} await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7553,22 +7580,23 @@ async def test_test_iam_permissions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.test_iam_permissions - ] = mock_object + ] = mock_rpc request = {} await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7989,22 +8017,23 @@ async def test_get_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_instance_partition - ] = mock_object + ] = mock_rpc request = {} await client.get_instance_partition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8333,8 +8362,9 @@ def test_create_instance_partition_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance_partition(request) @@ -8390,26 +8420,28 @@ async def test_create_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_instance_partition - ] = mock_object + ] = mock_rpc request = {} await client.create_instance_partition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8802,22 +8834,23 @@ async def test_delete_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_instance_partition - ] = mock_object + ] = mock_rpc request = {} await client.delete_instance_partition(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_instance_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9119,8 +9152,9 @@ def test_update_instance_partition_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance_partition(request) @@ -9176,26 +9210,28 @@ async def test_update_instance_partition_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_instance_partition - ] = mock_object + ] = mock_rpc request = {} await client.update_instance_partition(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance_partition(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -9602,22 +9638,23 @@ async def test_list_instance_partition_operations_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_instance_partition_operations - ] = mock_object + ] = mock_rpc request = {} await client.list_instance_partition_operations(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instance_partition_operations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -10032,52 +10069,92 @@ async def test_list_instance_partition_operations_async_pages(): @pytest.mark.parametrize( "request_type", [ - spanner_instance_admin.ListInstanceConfigsRequest, + spanner_instance_admin.MoveInstanceRequest, dict, ], ) -def test_list_instance_configs_rest(request_type): +def test_move_instance(request_type, transport: str = "grpc"): client = InstanceAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token="next_page_token_value", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.move_instance(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( - return_value + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.MoveInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_move_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - json_return_value = json_format.MessageToJson(return_value) + client.move_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.MoveInstanceRequest() - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_instance_configs(request) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigsPager) - assert response.next_page_token == "next_page_token_value" +def test_move_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.move_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) -def test_list_instance_configs_rest_use_cached_wrapped_rpc(): +def test_move_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = InstanceAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10085,76 +10162,324 @@ def test_list_instance_configs_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_instance_configs - in client._transport._wrapped_methods - ) + assert client._transport.move_instance in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_instance_configs - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc request = {} - client.list_instance_configs(request) + client.move_instance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_instance_configs(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_instance_configs_rest_required_fields( - request_type=spanner_instance_admin.ListInstanceConfigsRequest, -): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_move_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.move_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.MoveInstanceRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_instance_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_move_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_instance_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.move_instance + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.move_instance + ] = mock_rpc - client = InstanceAdminClient( + request = {} + await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_move_instance_async( + transport: str = "grpc_asyncio", + request_type=spanner_instance_admin.MoveInstanceRequest, +): + client = InstanceAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.MoveInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_move_instance_async_from_dict(): + await test_move_instance_async(request_type=dict) + + +def test_move_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.MoveInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_move_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.MoveInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + spanner_instance_admin.ListInstanceConfigsRequest, + dict, + ], +) +def test_list_instance_configs_rest(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instance_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_instance_configs + ] = mock_rpc + + request = {} + client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_configs_rest_required_fields( + request_type=spanner_instance_admin.ListInstanceConfigsRequest, +): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) request = request_type(**request_init) @@ -12893,6 +13218,7 @@ def test_get_instance_rest(request_type): processing_units=1743, state=spanner_instance_admin.Instance.State.CREATING, endpoint_uris=["endpoint_uris_value"], + edition=spanner_instance_admin.Instance.Edition.STANDARD, ) # Wrap the value into a proper Response obj @@ -12915,6 +13241,7 @@ def test_get_instance_rest(request_type): assert response.processing_units == 1743 assert response.state == spanner_instance_admin.Instance.State.CREATING assert response.endpoint_uris == ["endpoint_uris_value"] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD def test_get_instance_rest_use_cached_wrapped_rpc(): @@ -16691,6 +17018,263 @@ def test_list_instance_partition_operations_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + spanner_instance_admin.MoveInstanceRequest, + dict, + ], +) +def test_move_instance_rest(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.move_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_move_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc + + request = {} + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_move_instance_rest_required_fields( + request_type=spanner_instance_admin.MoveInstanceRequest, +): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request_init["target_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["targetConfig"] = "target_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "targetConfig" in jsonified_request + assert jsonified_request["targetConfig"] == "target_config_value" + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.move_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_move_instance_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.move_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "targetConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_instance_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.InstanceAdminRestInterceptor, "post_move_instance" + ) as post, mock.patch.object( + transports.InstanceAdminRestInterceptor, "pre_move_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = spanner_instance_admin.MoveInstanceRequest.pb( + spanner_instance_admin.MoveInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = spanner_instance_admin.MoveInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.move_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_instance_rest_bad_request( + transport: str = "rest", request_type=spanner_instance_admin.MoveInstanceRequest +): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_instance(request) + + +def test_move_instance_rest_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.InstanceAdminGrpcTransport( @@ -16850,6 +17434,7 @@ def test_instance_admin_base_transport(): "delete_instance_partition", "update_instance_partition", "list_instance_partition_operations", + "move_instance", ) for method in methods: with pytest.raises(NotImplementedError): @@ -17202,6 +17787,9 @@ def test_instance_admin_client_transport_session_collision(transport_name): session1 = client1.transport.list_instance_partition_operations._session session2 = client2.transport.list_instance_partition_operations._session assert session1 != session2 + session1 = client1.transport.move_instance._session + session2 = client2.transport.move_instance._session + assert session1 != session2 def test_instance_admin_grpc_transport_channel(): diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index 70ba97827e..d49f450e86 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -1236,22 +1236,23 @@ async def test_create_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_session - ] = mock_object + ] = mock_rpc request = {} await client.create_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1608,22 +1609,23 @@ async def test_batch_create_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_create_sessions - ] = mock_object + ] = mock_rpc request = {} await client.batch_create_sessions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_create_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1995,22 +1997,23 @@ async def test_get_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_session - ] = mock_object + ] = mock_rpc request = {} await client.get_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2363,22 +2366,23 @@ async def test_list_sessions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_sessions - ] = mock_object + ] = mock_rpc request = {} await client.list_sessions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sessions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2914,22 +2918,23 @@ async def test_delete_session_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_session - ] = mock_object + ] = mock_rpc request = {} await client.delete_session(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_session(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3266,22 +3271,23 @@ async def test_execute_sql_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_sql - ] = mock_object + ] = mock_rpc request = {} await client.execute_sql(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_sql(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3557,22 +3563,23 @@ async def test_execute_streaming_sql_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_streaming_sql - ] = mock_object + ] = mock_rpc request = {} await client.execute_streaming_sql(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_streaming_sql(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3850,22 +3857,23 @@ async def test_execute_batch_dml_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.execute_batch_dml - ] = mock_object + ] = mock_rpc request = {} await client.execute_batch_dml(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.execute_batch_dml(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4131,22 +4139,23 @@ async def test_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.read - ] = mock_object + ] = mock_rpc request = {} await client.read(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.read(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4411,22 +4420,23 @@ async def test_streaming_read_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_read - ] = mock_object + ] = mock_rpc request = {} await client.streaming_read(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_read(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4703,22 +4713,23 @@ async def test_begin_transaction_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.begin_transaction - ] = mock_object + ] = mock_rpc request = {} await client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.begin_transaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5104,22 +5115,23 @@ async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit - ] = mock_object + ] = mock_rpc request = {} await client.commit(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5512,22 +5524,23 @@ async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback - ] = mock_object + ] = mock_rpc request = {} await client.rollback(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5874,22 +5887,23 @@ async def test_partition_query_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.partition_query - ] = mock_object + ] = mock_rpc request = {} await client.partition_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.partition_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6152,22 +6166,23 @@ async def test_partition_read_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.partition_read - ] = mock_object + ] = mock_rpc request = {} await client.partition_read(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.partition_read(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6428,22 +6443,23 @@ async def test_batch_write_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.batch_write - ] = mock_object + ] = mock_rpc request = {} await client.batch_write(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_write(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From 3c91a0165bc658fb3ca3f7080603aa47060a5ecd Mon Sep 17 00:00:00 2001 From: Sanjeev Bhatt Date: Mon, 19 Aug 2024 14:23:03 +0530 Subject: [PATCH 05/10] test(spanner): Refactoring testdata (#1184) * chore(spanner): Issue1180# [Refactoring] Create a copy of samples/samples/testdata in tests * created copy in tests/system and test/unit * updated references * chore(spanner): Issue1180# [Refactoring] Create a copy of samples/samples/testdata in tests * updated formatting (nox -s blacken) --------- Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> --- tests/system/_sample_data.py | 2 +- tests/system/test_session_api.py | 2 +- tests/system/testdata/singer.proto | 17 +++++++++++++++++ tests/system/testdata/singer_pb2.py | 29 +++++++++++++++++++++++++++++ tests/unit/test__helpers.py | 8 ++++---- tests/unit/test_param_types.py | 4 ++-- tests/unit/testdata/singer.proto | 17 +++++++++++++++++ tests/unit/testdata/singer_pb2.py | 29 +++++++++++++++++++++++++++++ 8 files changed, 100 insertions(+), 8 deletions(-) create mode 100644 tests/system/testdata/singer.proto create mode 100644 tests/system/testdata/singer_pb2.py create mode 100644 tests/unit/testdata/singer.proto create mode 100644 tests/unit/testdata/singer_pb2.py diff --git a/tests/system/_sample_data.py b/tests/system/_sample_data.py index 41f41c9fe5..f23110c5dd 100644 --- a/tests/system/_sample_data.py +++ b/tests/system/_sample_data.py @@ -18,7 +18,7 @@ from google.api_core import datetime_helpers from google.cloud._helpers import UTC from google.cloud import spanner_v1 -from samples.samples.testdata import singer_pb2 +from .testdata import singer_pb2 TABLE = "contacts" COLUMNS = ("contact_id", "first_name", "last_name", "email") diff --git 
a/tests/system/test_session_api.py b/tests/system/test_session_api.py index 00fdf828da..31e38f967a 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -29,7 +29,7 @@ from google.cloud.spanner_admin_database_v1 import DatabaseDialect from google.cloud._helpers import UTC from google.cloud.spanner_v1.data_types import JsonObject -from samples.samples.testdata import singer_pb2 +from .testdata import singer_pb2 from tests import _helpers as ot_helpers from . import _helpers from . import _sample_data diff --git a/tests/system/testdata/singer.proto b/tests/system/testdata/singer.proto new file mode 100644 index 0000000000..1a995614a7 --- /dev/null +++ b/tests/system/testdata/singer.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package examples.spanner.music; + +message SingerInfo { + optional int64 singer_id = 1; + optional string birth_date = 2; + optional string nationality = 3; + optional Genre genre = 4; +} + +enum Genre { + POP = 0; + JAZZ = 1; + FOLK = 2; + ROCK = 3; +} diff --git a/tests/system/testdata/singer_pb2.py b/tests/system/testdata/singer_pb2.py new file mode 100644 index 0000000000..51b049865c --- /dev/null +++ b/tests/system/testdata/singer_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: singer.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0csinger.proto\x12\x16\x65xamples.spanner.music"\xc1\x01\n\nSingerInfo\x12\x16\n\tsinger_id\x18\x01 \x01(\x03H\x00\x88\x01\x01\x12\x17\n\nbirth_date\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0bnationality\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x31\n\x05genre\x18\x04 \x01(\x0e\x32\x1d.examples.spanner.music.GenreH\x03\x88\x01\x01\x42\x0c\n\n_singer_idB\r\n\x0b_birth_dateB\x0e\n\x0c_nationalityB\x08\n\x06_genre*.\n\x05Genre\x12\x07\n\x03POP\x10\x00\x12\x08\n\x04JAZZ\x10\x01\x12\x08\n\x04\x46OLK\x10\x02\x12\x08\n\x04ROCK\x10\x03\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "singer_pb2", _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals["_GENRE"]._serialized_start = 236 + _globals["_GENRE"]._serialized_end = 282 + _globals["_SINGERINFO"]._serialized_start = 41 + _globals["_SINGERINFO"]._serialized_end = 234 +# @@protoc_insertion_point(module_scope) diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index 11adec6ac9..e62bff2a2e 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -356,7 +356,7 @@ def test_w_json_None(self): def test_w_proto_message(self): from google.protobuf.struct_pb2 import Value import base64 - from samples.samples.testdata import singer_pb2 + from .testdata import singer_pb2 singer_info = singer_pb2.SingerInfo() expected = Value(string_value=base64.b64encode(singer_info.SerializeToString())) @@ -366,7 +366,7 @@ def test_w_proto_message(self): def test_w_proto_enum(self): from google.protobuf.struct_pb2 import Value - from samples.samples.testdata import singer_pb2 + from .testdata import singer_pb2 
value_pb = self._callFUT(singer_pb2.Genre.ROCK) self.assertIsInstance(value_pb, Value) @@ -710,7 +710,7 @@ def test_w_proto_message(self): from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode import base64 - from samples.samples.testdata import singer_pb2 + from .testdata import singer_pb2 VALUE = singer_pb2.SingerInfo() field_type = Type(code=TypeCode.PROTO) @@ -726,7 +726,7 @@ def test_w_proto_enum(self): from google.protobuf.struct_pb2 import Value from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode - from samples.samples.testdata import singer_pb2 + from .testdata import singer_pb2 VALUE = "ROCK" field_type = Type(code=TypeCode.ENUM) diff --git a/tests/unit/test_param_types.py b/tests/unit/test_param_types.py index a7069543c8..1b0660614a 100644 --- a/tests/unit/test_param_types.py +++ b/tests/unit/test_param_types.py @@ -94,7 +94,7 @@ def test_it(self): from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode from google.cloud.spanner_v1 import param_types - from samples.samples.testdata import singer_pb2 + from .testdata import singer_pb2 singer_info = singer_pb2.SingerInfo() expected = Type( @@ -111,7 +111,7 @@ def test_it(self): from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode from google.cloud.spanner_v1 import param_types - from samples.samples.testdata import singer_pb2 + from .testdata import singer_pb2 singer_genre = singer_pb2.Genre expected = Type( diff --git a/tests/unit/testdata/singer.proto b/tests/unit/testdata/singer.proto new file mode 100644 index 0000000000..1a995614a7 --- /dev/null +++ b/tests/unit/testdata/singer.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package examples.spanner.music; + +message SingerInfo { + optional int64 singer_id = 1; + optional string birth_date = 2; + optional string nationality = 3; + optional Genre genre = 4; +} + +enum Genre { + POP = 0; + JAZZ = 1; + FOLK = 2; + ROCK = 3; +} diff --git a/tests/unit/testdata/singer_pb2.py b/tests/unit/testdata/singer_pb2.py new file mode 100644 index 0000000000..51b049865c --- /dev/null +++ b/tests/unit/testdata/singer_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: singer.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0csinger.proto\x12\x16\x65xamples.spanner.music"\xc1\x01\n\nSingerInfo\x12\x16\n\tsinger_id\x18\x01 \x01(\x03H\x00\x88\x01\x01\x12\x17\n\nbirth_date\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0bnationality\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x31\n\x05genre\x18\x04 \x01(\x0e\x32\x1d.examples.spanner.music.GenreH\x03\x88\x01\x01\x42\x0c\n\n_singer_idB\r\n\x0b_birth_dateB\x0e\n\x0c_nationalityB\x08\n\x06_genre*.\n\x05Genre\x12\x07\n\x03POP\x10\x00\x12\x08\n\x04JAZZ\x10\x01\x12\x08\n\x04\x46OLK\x10\x02\x12\x08\n\x04ROCK\x10\x03\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "singer_pb2", _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals["_GENRE"]._serialized_start = 236 + _globals["_GENRE"]._serialized_end = 282 + _globals["_SINGERINFO"]._serialized_start = 41 + _globals["_SINGERINFO"]._serialized_end = 234 +# @@protoc_insertion_point(module_scope) From 44434aaa501c7097920140115074521c8ab87f63 Mon Sep 17 00:00:00 2001 From: Sanjeev Bhatt Date: Mon, 26 Aug 2024 10:49:41 +0530 Subject: [PATCH 06/10] chore(spanner): Issue#1143 - Update dependency (#1158) * chore(spanner): Issue#1143 - Update dependency - Move grpc-interceptor to extras_required named testing * chore(spanner): Issue#1143 - Update dependency - Move grpc-interceptor to extras_required named testing * chore(spanner): Issue#1143 - Update dependency - add dependency 'testing' for pretest * chore(spanner): Issue#1143 - Update dependency - add dependency 'testing' for docs and docfx sessions * chore(spanner): Issue#1143 - Update dependency - Added "testing" dependency to owlbot.py - Fixed lint error --------- Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> --- noxfile.py | 9 +++++---- owlbot.py | 6 +++--- setup.py | 3 +-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/noxfile.py b/noxfile.py index 3b656a758c..e599d96369 100644 --- a/noxfile.py +++ b/noxfile.py @@ -59,6 +59,7 @@ SYSTEM_TEST_DEPENDENCIES: List[str] = [] SYSTEM_TEST_EXTRAS: List[str] = [ "tracing", + "testing", ] SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} @@ -165,7 +166,7 @@ def install_unittest_dependencies(session, *constraints): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-e", ".[tracing]", "-c", constraints_path) + session.install("-e", ".[tracing, testing]", "-c", constraints_path) # XXX: Dump installed versions to debug OT issue session.run("pip", "list") @@ -336,7 +337,7 @@ def cover(session): def docs(session): """Build the docs for this library.""" - session.install("-e", ".[tracing]") + session.install("-e", ".[tracing, testing]") session.install( # We need to pin to specific versions of the `sphinxcontrib-*` packages # which still support sphinx 4.x. 
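
Note on verifying the extras move in this patch: once a build of this branch is installed (for example with `pip install -e ".[tracing, testing]"`), the reshuffled dependency can be checked from the standard library alone. The snippet below is an illustrative sketch, not part of the patch itself; the exact set of extras it prints is an assumption based on the setup.py change further down.

import importlib.metadata as md

# Every declared extra shows up in the distribution metadata;
# after this patch that list should include the new "testing" extra
# alongside "tracing" and "libcst".
print(md.metadata("google-cloud-spanner").get_all("Provides-Extra"))

# grpc-interceptor is expected to appear only behind the "testing"
# marker now, e.g. 'grpc-interceptor>=0.15.4; extra == "testing"'.
for req in md.requires("google-cloud-spanner"):
    if "grpc-interceptor" in req:
        print(req)
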
@@ -371,7 +372,7 @@ def docs(session):
 
 def docfx(session):
     """Build the docfx yaml files for this library."""
 
-    session.install("-e", ".[tracing]")
+    session.install("-e", ".[tracing, testing]")
     session.install(
         # We need to pin to specific versions of the `sphinxcontrib-*` packages
         # which still support sphinx 4.x.
@@ -432,7 +433,7 @@ def prerelease_deps(session, protobuf_implementation, database_dialect):
         session.skip("cpp implementation is not supported in python 3.11+")
 
     # Install all dependencies
-    session.install("-e", ".[all, tests, tracing]")
+    session.install("-e", ".[all, tests, tracing, testing]")
     unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
     session.install(*unit_deps_all)
     system_deps_all = (
diff --git a/owlbot.py b/owlbot.py
index e9c12e593c..b7f09f2f74 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -128,7 +128,7 @@ def get_staging_dirs(
     samples=True,
     cov_level=98,
     split_system_tests=True,
-    system_test_extras=["tracing"],
+    system_test_extras=["tracing", "testing"],
 )
 s.move(
     templated_files,
@@ -180,7 +180,7 @@ def place_before(path, text, *before_text, escape=None):
     constraints_path = str(
         CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
     )
-    session.install("-e", ".[tracing]", "-c", constraints_path)
+    session.install("-e", ".[tracing, testing]", "-c", constraints_path)
 
     # XXX: Dump installed versions to debug OT issue
     session.run("pip", "list")
@@ -229,7 +229,7 @@ def place_before(path, text, *before_text, escape=None):
 s.replace(
     "noxfile.py",
     r"""session.install\("-e", "."\)""",
-    """session.install("-e", ".[tracing]")""",
+    """session.install("-e", ".[tracing, testing]")""",
 )
 
 # Apply manual changes from PR https://github.com/googleapis/python-spanner/pull/759
diff --git a/setup.py b/setup.py
index 98b1a61748..5df9c6d82e 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,6 @@
 
 name = "google-cloud-spanner"
 
-
 description = "Google Cloud Spanner API client library"
 
 version = {}
@@ -43,7 +42,6 @@
     "sqlparse >= 0.4.4",
     "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
     "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
-    "grpc-interceptor >= 0.15.4",
 ]
 extras = {
     "tracing": [
@@ -52,6 +50,7 @@
         "opentelemetry-instrumentation >= 0.20b0, < 0.23dev",
     ],
     "libcst": "libcst >= 0.2.5",
+    "testing": "grpc-interceptor >= 0.15.4",
 }
 
 url = "https://github.com/googleapis/python-spanner"

From bd62d7c77475ca0bb9b386254379466b45a995ad Mon Sep 17 00:00:00 2001
From: Sanjeev Bhatt
Date: Tue, 27 Aug 2024 11:35:53 +0530
Subject: [PATCH 07/10] chore(spanner): Issue1178# [spanner_dbapi] Running a
 query that contains just a comment causes an IndexError exception (#1181)

- raise ProgrammingError("Invalid Statement.") instead of the IndexError
---
 google/cloud/spanner_dbapi/cursor.py      | 3 +++
 google/cloud/spanner_dbapi/parse_utils.py | 2 ++
 tests/system/test_dbapi.py                | 4 ++++
 3 files changed, 9 insertions(+)

diff --git a/google/cloud/spanner_dbapi/cursor.py b/google/cloud/spanner_dbapi/cursor.py
index bcbc8aa5a8..8b4170e3f2 100644
--- a/google/cloud/spanner_dbapi/cursor.py
+++ b/google/cloud/spanner_dbapi/cursor.py
@@ -251,6 +251,9 @@ def _execute(self, sql, args=None, call_from_execute_many=False):
         exception = None
         try:
             self._parsed_statement = parse_utils.classify_statement(sql, args)
+            if self._parsed_statement is None:
+                raise ProgrammingError("Invalid Statement.")
+
             if self._parsed_statement.statement_type == StatementType.CLIENT_SIDE:
                 self._result_set = client_side_statement_executor.execute(
                     self, self._parsed_statement
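
As a companion to the cursor change above, here is a minimal sketch of the new behavior at the DB-API level; it is not part of the patch, and the instance and database IDs are placeholders. The comment-only statement never reaches the backend, because classification fails before any RPC is made.

from google.cloud.spanner_dbapi import connect
from google.cloud.spanner_dbapi.exceptions import ProgrammingError

conn = connect("my-instance-id", "my-database-id")  # placeholder resource names
cursor = conn.cursor()
try:
    # sqlparse strips the comment, classify_statement() sees an empty
    # string and returns None, and _execute() raises ProgrammingError
    # instead of the IndexError reported in issue #1178.
    cursor.execute("-- just a comment, no statement")
except ProgrammingError as exc:
    print("rejected:", exc)
finally:
    conn.close()
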
diff --git a/google/cloud/spanner_dbapi/parse_utils.py b/google/cloud/spanner_dbapi/parse_utils.py
index 5446458819..403550640e 100644
--- a/google/cloud/spanner_dbapi/parse_utils.py
+++ b/google/cloud/spanner_dbapi/parse_utils.py
@@ -226,6 +226,8 @@ def classify_statement(query, args=None):
     # PostgreSQL dollar quoted comments are not
     # supported and will not be stripped.
     query = sqlparse.format(query, strip_comments=True).strip()
+    if query == "":
+        return None
     parsed_statement: ParsedStatement = client_side_statement_parser.parse_stmt(query)
     if parsed_statement is not None:
         return parsed_statement
diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py
index 5a77024689..feb580d903 100644
--- a/tests/system/test_dbapi.py
+++ b/tests/system/test_dbapi.py
@@ -1598,3 +1598,7 @@ def test_list_tables(self, include_views):
             assert "contacts_emails" in table_names
         else:  # if not include_views:
             assert "contacts_emails" not in table_names
+
+    def test_invalid_statement_error(self):
+        with pytest.raises(ProgrammingError):
+            self._cursor.execute("-- comment only")

From f886ebd80a6422c2167cd440a2a646f52701b684 Mon Sep 17 00:00:00 2001
From: bharadwajvr
Date: Tue, 27 Aug 2024 05:22:37 -0700
Subject: [PATCH 08/10] feat: Create a few code snippets as examples for using
 Spanner Graph in Python (#1186)

* Create a set of code snippets for using Graph on Cloud Spanner

* Update to match gcloud/cli examples that exist in the docs

* Fix update with graph query predicate syntax

* Added an update step for allowing commit timestamps and changed the schema to not have that option

* Fix styling using flake8

* Add tests for new Spanner Graph snippets

* Fix some region tags that were inconsistent

* Remove one unnecessary function and some redundant comments

* Remove reference to allow_commit_timestamp

* Fix lint issues in test file

---------

Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com>
---
 samples/samples/graph_snippets.py      | 407 +++++++++++++++++++++++++
 samples/samples/graph_snippets_test.py | 213 +++++++++++++
 2 files changed, 620 insertions(+)
 create mode 100644 samples/samples/graph_snippets.py
 create mode 100644 samples/samples/graph_snippets_test.py

diff --git a/samples/samples/graph_snippets.py b/samples/samples/graph_snippets.py
new file mode 100644
index 0000000000..e557290b19
--- /dev/null
+++ b/samples/samples/graph_snippets.py
@@ -0,0 +1,407 @@
+#!/usr/bin/env python
+
+# Copyright 2024 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to do basic graph operations using
+Cloud Spanner.
+
+For more information, see the README.rst under /spanner.
+""" + +import argparse + +from google.cloud import spanner + +OPERATION_TIMEOUT_SECONDS = 240 + + +# [START spanner_create_database_with_property_graph] +def create_database_with_property_graph(instance_id, database_id): + """Creates a database, tables and a property graph for sample data.""" + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin + + spanner_client = spanner.Client() + database_admin_api = spanner_client.database_admin_api + + request = spanner_database_admin.CreateDatabaseRequest( + parent=database_admin_api.instance_path(spanner_client.project, instance_id), + create_statement=f"CREATE DATABASE `{database_id}`", + extra_statements=[ + """CREATE TABLE Person ( + id INT64 NOT NULL, + name STRING(MAX), + birthday TIMESTAMP, + country STRING(MAX), + city STRING(MAX), + ) PRIMARY KEY (id)""", + """CREATE TABLE Account ( + id INT64 NOT NULL, + create_time TIMESTAMP, + is_blocked BOOL, + nick_name STRING(MAX), + ) PRIMARY KEY (id)""", + """CREATE TABLE PersonOwnAccount ( + id INT64 NOT NULL, + account_id INT64 NOT NULL, + create_time TIMESTAMP, + FOREIGN KEY (account_id) + REFERENCES Account (id) + ) PRIMARY KEY (id, account_id), + INTERLEAVE IN PARENT Person ON DELETE CASCADE""", + """CREATE TABLE AccountTransferAccount ( + id INT64 NOT NULL, + to_id INT64 NOT NULL, + amount FLOAT64, + create_time TIMESTAMP NOT NULL, + order_number STRING(MAX), + FOREIGN KEY (to_id) REFERENCES Account (id) + ) PRIMARY KEY (id, to_id, create_time), + INTERLEAVE IN PARENT Account ON DELETE CASCADE""", + """CREATE OR REPLACE PROPERTY GRAPH FinGraph + NODE TABLES (Account, Person) + EDGE TABLES ( + PersonOwnAccount + SOURCE KEY(id) REFERENCES Person(id) + DESTINATION KEY(account_id) REFERENCES Account(id) + LABEL Owns, + AccountTransferAccount + SOURCE KEY(id) REFERENCES Account(id) + DESTINATION KEY(to_id) REFERENCES Account(id) + LABEL Transfers)""", + ], + ) + + operation = database_admin_api.create_database(request=request) + + print("Waiting for operation to complete...") + database = operation.result(OPERATION_TIMEOUT_SECONDS) + + print( + "Created database {} on instance {}".format( + database.name, + database_admin_api.instance_path(spanner_client.project, instance_id), + ) + ) + + +# [END spanner_create_database_with_property_graph] + + +# [START spanner_insert_graph_data] +def insert_data(instance_id, database_id): + """Inserts sample data into the given database. + + The database and tables must already exist and can be created using + `create_database_with_property_graph`. 
+ """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.batch() as batch: + batch.insert( + table="Account", + columns=("id", "create_time", "is_blocked", "nick_name"), + values=[ + (7, "2020-01-10T06:22:20.12Z", False, "Vacation Fund"), + (16, "2020-01-27T17:55:09.12Z", True, "Vacation Fund"), + (20, "2020-02-18T05:44:20.12Z", False, "Rainy Day Fund"), + ], + ) + + batch.insert( + table="Person", + columns=("id", "name", "birthday", "country", "city"), + values=[ + (1, "Alex", "1991-12-21T00:00:00.12Z", "Australia", " Adelaide"), + (2, "Dana", "1980-10-31T00:00:00.12Z", "Czech_Republic", "Moravia"), + (3, "Lee", "1986-12-07T00:00:00.12Z", "India", "Kollam"), + ], + ) + + batch.insert( + table="AccountTransferAccount", + columns=("id", "to_id", "amount", "create_time", "order_number"), + values=[ + (7, 16, 300.0, "2020-08-29T15:28:58.12Z", "304330008004315"), + (7, 16, 100.0, "2020-10-04T16:55:05.12Z", "304120005529714"), + (16, 20, 300.0, "2020-09-25T02:36:14.12Z", "103650009791820"), + (20, 7, 500.0, "2020-10-04T16:55:05.12Z", "304120005529714"), + (20, 16, 200.0, "2020-10-17T03:59:40.12Z", "302290001255747"), + ], + ) + + batch.insert( + table="PersonOwnAccount", + columns=("id", "account_id", "create_time"), + values=[ + (1, 7, "2020-01-10T06:22:20.12Z"), + (2, 20, "2020-01-27T17:55:09.12Z"), + (3, 16, "2020-02-18T05:44:20.12Z"), + ], + ) + + print("Inserted data.") + + +# [END spanner_insert_graph_data] + + +# [START spanner_insert_graph_data_with_dml] +def insert_data_with_dml(instance_id, database_id): + """Inserts sample data into the given database using a DML statement.""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def insert_accounts(transaction): + row_ct = transaction.execute_update( + "INSERT INTO Account (id, create_time, is_blocked) " + " VALUES" + " (1, CAST('2000-08-10 08:18:48.463959-07:52' AS TIMESTAMP), false)," + " (2, CAST('2000-08-12 07:13:16.463959-03:41' AS TIMESTAMP), true)" + ) + + print("{} record(s) inserted into Account.".format(row_ct)) + + def insert_transfers(transaction): + row_ct = transaction.execute_update( + "INSERT INTO AccountTransferAccount (id, to_id, create_time, amount) " + " VALUES" + " (1, 2, CAST('2000-09-11 03:11:18.463959-06:36' AS TIMESTAMP), 100)," + " (1, 1, CAST('2000-09-12 04:09:34.463959-05:12' AS TIMESTAMP), 200) " + ) + + print("{} record(s) inserted into AccountTransferAccount.".format(row_ct)) + + database.run_in_transaction(insert_accounts) + database.run_in_transaction(insert_transfers) + + +# [END spanner_insert_graph_data_with_dml] + + +# [START spanner_update_graph_data_with_dml] +def update_data_with_dml(instance_id, database_id): + """Updates sample data from the database using a DML statement.""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def update_accounts(transaction): + row_ct = transaction.execute_update( + "UPDATE Account SET is_blocked = false WHERE id = 2" + ) + + print("{} Account record(s) updated.".format(row_ct)) + + def update_transfers(transaction): + row_ct = transaction.execute_update( + "UPDATE AccountTransferAccount SET amount = 300 WHERE id = 1 AND to_id = 2" + ) + + print("{} AccountTransferAccount record(s) updated.".format(row_ct)) + + database.run_in_transaction(update_accounts) + 
database.run_in_transaction(update_transfers) + + +# [END spanner_update_graph_data_with_dml] + + +# [START spanner_update_graph_data_with_graph_query_in_dml] +def update_data_with_graph_query_in_dml(instance_id, database_id): + """Updates sample data from the database using a DML statement.""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def update_accounts(transaction): + row_ct = transaction.execute_update( + "UPDATE Account SET is_blocked = true " + "WHERE id IN {" + " GRAPH FinGraph" + " MATCH (a:Account WHERE a.id = 1)-[:TRANSFERS]->{1,2}(b:Account)" + " RETURN b.id}" + ) + + print("{} Account record(s) updated.".format(row_ct)) + + database.run_in_transaction(update_accounts) + + +# [END spanner_update_graph_data_with_graph_query_in_dml] + + +# [START spanner_query_graph_data] +def query_data(instance_id, database_id): + """Queries sample data from the database using GQL.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + """Graph FinGraph + MATCH (a:Person)-[o:Owns]->()-[t:Transfers]->()<-[p:Owns]-(b:Person) + RETURN a.name AS sender, b.name AS receiver, t.amount, t.create_time AS transfer_at""" + ) + + for row in results: + print("sender: {}, receiver: {}, amount: {}, transfer_at: {}".format(*row)) + + +# [END spanner_query_graph_data] + + +# [START spanner_query_graph_data_with_parameter] +def query_data_with_parameter(instance_id, database_id): + """Queries sample data from the database using SQL with a parameter.""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + """Graph FinGraph + MATCH (a:Person)-[o:Owns]->()-[t:Transfers]->()<-[p:Owns]-(b:Person) + WHERE t.amount >= @min + RETURN a.name AS sender, b.name AS receiver, t.amount, t.create_time AS transfer_at""", + params={"min": 500}, + param_types={"min": spanner.param_types.INT64}, + ) + + for row in results: + print("sender: {}, receiver: {}, amount: {}, transfer_at: {}".format(*row)) + + +# [END spanner_query_graph_data_with_parameter] + + +# [START spanner_delete_graph_data_with_dml] +def delete_data_with_dml(instance_id, database_id): + """Deletes sample data from the database using a DML statement.""" + + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + def delete_transfers(transaction): + row_ct = transaction.execute_update( + "DELETE FROM AccountTransferAccount WHERE id = 1 AND to_id = 2" + ) + + print("{} AccountTransferAccount record(s) deleted.".format(row_ct)) + + def delete_accounts(transaction): + row_ct = transaction.execute_update("DELETE FROM Account WHERE id = 2") + + print("{} Account record(s) deleted.".format(row_ct)) + + database.run_in_transaction(delete_transfers) + database.run_in_transaction(delete_accounts) + + +# [END spanner_delete_graph_data_with_dml] + + +# [START spanner_delete_graph_data] +def delete_data(instance_id, database_id): + """Deletes sample data from the given database. + + The database, table, and data must already exist and can be created using + `create_database` and `insert_data`. 
+ """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + # Delete individual rows + ownerships_to_delete = spanner.KeySet(keys=[[1, 7], [2, 20]]) + + # Delete a range of rows where the column key is >=1 and <8 + transfers_range = spanner.KeyRange(start_closed=[1], end_open=[8]) + transfers_to_delete = spanner.KeySet(ranges=[transfers_range]) + + # Delete Account/Person rows, which will also delete the remaining + # AccountTransferAccount and PersonOwnAccount rows because + # AccountTransferAccount and PersonOwnAccount are defined with + # ON DELETE CASCADE + remaining_nodes = spanner.KeySet(all_=True) + + with database.batch() as batch: + batch.delete("PersonOwnAccount", ownerships_to_delete) + batch.delete("AccountTransferAccount", transfers_to_delete) + batch.delete("Account", remaining_nodes) + batch.delete("Person", remaining_nodes) + + print("Deleted data.") + + +# [END spanner_delete_graph_data] + + +if __name__ == "__main__": # noqa: C901 + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") + parser.add_argument( + "--database-id", help="Your Cloud Spanner database ID.", default="example_db" + ) + + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser( + "create_database_with_property_graph", + help=create_database_with_property_graph.__doc__, + ) + subparsers.add_parser("insert_data", help=insert_data.__doc__) + subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) + subparsers.add_parser( + "update_data_with_graph_query_in_dml", + help=update_data_with_graph_query_in_dml.__doc__, + ) + subparsers.add_parser("query_data", help=query_data.__doc__) + subparsers.add_parser( + "query_data_with_parameter", help=query_data_with_parameter.__doc__ + ) + subparsers.add_parser("delete_data", help=delete_data.__doc__) + subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) + + args = parser.parse_args() + + if args.command == "create_database_with_property_graph": + create_database_with_property_graph(args.instance_id, args.database_id) + elif args.command == "insert_data": + insert_data(args.instance_id, args.database_id) + elif args.command == "insert_data_with_dml": + insert_data_with_dml(args.instance_id, args.database_id) + elif args.command == "update_data_with_dml": + update_data_with_dml(args.instance_id, args.database_id) + elif args.command == "update_data_with_graph_query_in_dml": + update_data_with_graph_query_in_dml(args.instance_id, args.database_id) + elif args.command == "query_data": + query_data(args.instance_id, args.database_id) + elif args.command == "query_data_with_parameter": + query_data_with_parameter(args.instance_id, args.database_id) + elif args.command == "delete_data_with_dml": + delete_data_with_dml(args.instance_id, args.database_id) + elif args.command == "delete_data": + delete_data(args.instance_id, args.database_id) diff --git a/samples/samples/graph_snippets_test.py b/samples/samples/graph_snippets_test.py new file mode 100644 index 0000000000..bd49260007 --- /dev/null +++ b/samples/samples/graph_snippets_test.py @@ -0,0 +1,213 @@ +# Copyright 2024 Google, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# import time +import uuid +import pytest + +from google.api_core import exceptions + +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect +from test_utils.retry import RetryErrors + +import graph_snippets + +retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15) + +CREATE_TABLE_PERSON = """\ +CREATE TABLE Person ( + id INT64 NOT NULL, + name STRING(MAX), + birthday TIMESTAMP, + country STRING(MAX), + city STRING(MAX), +) PRIMARY KEY (id) +""" + +CREATE_TABLE_ACCOUNT = """\ + CREATE TABLE Account ( + id INT64 NOT NULL, + create_time TIMESTAMP, + is_blocked BOOL, + nick_name STRING(MAX), + ) PRIMARY KEY (id) +""" + +CREATE_TABLE_PERSON_OWN_ACCOUNT = """\ +CREATE TABLE PersonOwnAccount ( + id INT64 NOT NULL, + account_id INT64 NOT NULL, + create_time TIMESTAMP, + FOREIGN KEY (account_id) + REFERENCES Account (id) + ) PRIMARY KEY (id, account_id), + INTERLEAVE IN PARENT Person ON DELETE CASCADE +""" + +CREATE_TABLE_ACCOUNT_TRANSFER_ACCOUNT = """\ +CREATE TABLE AccountTransferAccount ( + id INT64 NOT NULL, + to_id INT64 NOT NULL, + amount FLOAT64, + create_time TIMESTAMP NOT NULL, + order_number STRING(MAX), + FOREIGN KEY (to_id) REFERENCES Account (id) + ) PRIMARY KEY (id, to_id, create_time), + INTERLEAVE IN PARENT Account ON DELETE CASCADE +""" + +CREATE_PROPERTY_GRAPH = """ +CREATE OR REPLACE PROPERTY GRAPH FinGraph + NODE TABLES (Account, Person) + EDGE TABLES ( + PersonOwnAccount + SOURCE KEY(id) REFERENCES Person(id) + DESTINATION KEY(account_id) REFERENCES Account(id) + LABEL Owns, + AccountTransferAccount + SOURCE KEY(id) REFERENCES Account(id) + DESTINATION KEY(to_id) REFERENCES Account(id) + LABEL Transfers) +""" + + +@pytest.fixture(scope="module") +def sample_name(): + return "snippets" + + +@pytest.fixture(scope="module") +def database_dialect(): + """Spanner dialect to be used for this sample. + + The dialect is used to initialize the dialect for the database. + It can either be GoogleStandardSql or PostgreSql. + """ + return DatabaseDialect.GOOGLE_STANDARD_SQL + + +@pytest.fixture(scope="module") +def database_id(): + return f"test-db-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def create_database_id(): + return f"create-db-{uuid.uuid4().hex[:10]}" + + +@pytest.fixture(scope="module") +def database_ddl(): + """Sequence of DDL statements used to set up the database. + + Sample testcase modules can override as needed. 
+    """
+    return [
+        CREATE_TABLE_PERSON,
+        CREATE_TABLE_ACCOUNT,
+        CREATE_TABLE_PERSON_OWN_ACCOUNT,
+        CREATE_TABLE_ACCOUNT_TRANSFER_ACCOUNT,
+        CREATE_PROPERTY_GRAPH,
+    ]
+
+
+def test_create_database_explicit(sample_instance, create_database_id):
+    graph_snippets.create_database_with_property_graph(
+        sample_instance.instance_id, create_database_id
+    )
+    database = sample_instance.database(create_database_id)
+    database.drop()
+
+
+@pytest.mark.dependency(name="insert_data")
+def test_insert_data(capsys, instance_id, sample_database):
+    graph_snippets.insert_data(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert "Inserted data" in out
+
+
+@pytest.mark.dependency(depends=["insert_data"])
+def test_query_data(capsys, instance_id, sample_database):
+    graph_snippets.query_data(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert (
+        "sender: Dana, receiver: Alex, amount: 500.0, transfer_at: 2020-10-04 16:55:05.120000+00:00"
+        in out
+    )
+    assert (
+        "sender: Lee, receiver: Dana, amount: 300.0, transfer_at: 2020-09-25 02:36:14.120000+00:00"
+        in out
+    )
+    assert (
+        "sender: Alex, receiver: Lee, amount: 300.0, transfer_at: 2020-08-29 15:28:58.120000+00:00"
+        in out
+    )
+    assert (
+        "sender: Alex, receiver: Lee, amount: 100.0, transfer_at: 2020-10-04 16:55:05.120000+00:00"
+        in out
+    )
+    assert (
+        "sender: Dana, receiver: Lee, amount: 200.0, transfer_at: 2020-10-17 03:59:40.120000+00:00"
+        in out
+    )
+
+
+@pytest.mark.dependency(depends=["insert_data"])
+def test_query_data_with_parameter(capsys, instance_id, sample_database):
+    graph_snippets.query_data_with_parameter(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert (
+        "sender: Dana, receiver: Alex, amount: 500.0, transfer_at: 2020-10-04 16:55:05.120000+00:00"
+        in out
+    )
+
+
+@pytest.mark.dependency(name="insert_data_with_dml", depends=["insert_data"])
+def test_insert_data_with_dml(capsys, instance_id, sample_database):
+    graph_snippets.insert_data_with_dml(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert "2 record(s) inserted into Account." in out
+    assert "2 record(s) inserted into AccountTransferAccount." in out
+
+
+@pytest.mark.dependency(name="update_data_with_dml", depends=["insert_data_with_dml"])
+def test_update_data_with_dml(capsys, instance_id, sample_database):
+    graph_snippets.update_data_with_dml(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert "1 Account record(s) updated." in out
+    assert "1 AccountTransferAccount record(s) updated." in out
+
+
+@pytest.mark.dependency(depends=["update_data_with_dml"])
+def test_update_data_with_graph_query_in_dml(capsys, instance_id, sample_database):
+    graph_snippets.update_data_with_graph_query_in_dml(
+        instance_id, sample_database.database_id
+    )
+    out, _ = capsys.readouterr()
+    assert "2 Account record(s) updated." in out
+
+
+@pytest.mark.dependency(depends=["update_data_with_dml"])
+def test_delete_data_with_dml(capsys, instance_id, sample_database):
+    graph_snippets.delete_data_with_dml(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert "1 AccountTransferAccount record(s) deleted." in out
+    assert "1 Account record(s) deleted." in out
+
+
+@pytest.mark.dependency(depends=["insert_data"])
+def test_delete_data(capsys, instance_id, sample_database):
+    graph_snippets.delete_data(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert "Deleted data."
in out From c4af6f09a449f293768f70a84e805ffe08c6c2fb Mon Sep 17 00:00:00 2001 From: Sumit Banerjee <123063931+forksumit@users.noreply.github.com> Date: Tue, 27 Aug 2024 19:54:23 +0530 Subject: [PATCH 09/10] fix: JsonObject init when called on JsonObject of list (#1166) Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> --- google/cloud/spanner_v1/data_types.py | 5 +++ tests/unit/test_datatypes.py | 45 +++++++++++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 tests/unit/test_datatypes.py diff --git a/google/cloud/spanner_v1/data_types.py b/google/cloud/spanner_v1/data_types.py index 130603afa9..63897b293c 100644 --- a/google/cloud/spanner_v1/data_types.py +++ b/google/cloud/spanner_v1/data_types.py @@ -38,6 +38,11 @@ def __init__(self, *args, **kwargs): self._array_value = args[0] return + if len(args) and isinstance(args[0], JsonObject): + self._is_array = args[0]._is_array + if self._is_array: + self._array_value = args[0]._array_value + if not self._is_null: super(JsonObject, self).__init__(*args, **kwargs) diff --git a/tests/unit/test_datatypes.py b/tests/unit/test_datatypes.py new file mode 100644 index 0000000000..60630f73d3 --- /dev/null +++ b/tests/unit/test_datatypes.py @@ -0,0 +1,45 @@ +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
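# A minimal sketch (not part of the patch itself) of the behavior these tests
# pin down, assuming a google-cloud-spanner build that includes the
# JsonObject.__init__ fix above:
#
#     import json
#     from google.cloud.spanner_v1.data_types import JsonObject
#
#     inner = JsonObject([{"k": "v"}])  # list input marks the object as an array
#     outer = JsonObject(inner)         # the fix copies _is_array/_array_value
#     assert outer.serialize() == json.dumps(
#         [{"k": "v"}], sort_keys=True, separators=(",", ":")
#     )
#
# Before the fix, the outer JsonObject fell through to dict.__init__ on the
# (empty) dict portion of the inner object and serialized as "{}", silently
# dropping the list contents.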
+ + +import unittest + +import json +from google.cloud.spanner_v1.data_types import JsonObject + + +class Test_JsonObject_serde(unittest.TestCase): + def test_w_dict(self): + data = {"foo": "bar"} + expected = json.dumps(data, sort_keys=True, separators=(",", ":")) + data_jsonobject = JsonObject(data) + self.assertEqual(data_jsonobject.serialize(), expected) + + def test_w_list_of_dict(self): + data = [{"foo1": "bar1"}, {"foo2": "bar2"}] + expected = json.dumps(data, sort_keys=True, separators=(",", ":")) + data_jsonobject = JsonObject(data) + self.assertEqual(data_jsonobject.serialize(), expected) + + def test_w_JsonObject_of_dict(self): + data = {"foo": "bar"} + expected = json.dumps(data, sort_keys=True, separators=(",", ":")) + data_jsonobject = JsonObject(JsonObject(data)) + self.assertEqual(data_jsonobject.serialize(), expected) + + def test_w_JsonObject_of_list_of_dict(self): + data = [{"foo1": "bar1"}, {"foo2": "bar2"}] + expected = json.dumps(data, sort_keys=True, separators=(",", ":")) + data_jsonobject = JsonObject(JsonObject(data)) + self.assertEqual(data_jsonobject.serialize(), expected) From d2c05239806e961076c49012b478cf992402a174 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 16:52:01 +0530 Subject: [PATCH 10/10] chore(main): release 3.49.0 (#1182) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 3.49.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- .release-please-manifest.json | 2 +- CHANGELOG.md | 14 ++++++++++++++ .../spanner_admin_database_v1/gapic_version.py | 2 +- .../spanner_admin_instance_v1/gapic_version.py | 2 +- google/cloud/spanner_v1/gapic_version.py | 2 +- noxfile.py | 2 +- ..._metadata_google.spanner.admin.database.v1.json | 2 +- ..._metadata_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- 9 files changed, 22 insertions(+), 8 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index cc48236467..b1de15d9a3 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.48.0" + ".": "3.49.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 89494da26a..05af3ad3d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## [3.49.0](https://github.com/googleapis/python-spanner/compare/v3.48.0...v3.49.0) (2024-08-27) + + +### Features + +* Create a few code snippets as examples for using Spanner Graph in Python ([#1186](https://github.com/googleapis/python-spanner/issues/1186)) ([f886ebd](https://github.com/googleapis/python-spanner/commit/f886ebd80a6422c2167cd440a2a646f52701b684)) +* **spanner:** Add resource reference annotation to backup schedules ([#1176](https://github.com/googleapis/python-spanner/issues/1176)) ([b503fc9](https://github.com/googleapis/python-spanner/commit/b503fc95d8abd47869a24f0e824a227a281282d6)) +* **spanner:** Add samples for instance partitions ([#1168](https://github.com/googleapis/python-spanner/issues/1168)) ([55f83dc](https://github.com/googleapis/python-spanner/commit/55f83dc5f776d436b30da6056a9cdcad3971ce39)) + + +### Bug Fixes + +* JsonObject init when called on JsonObject of list 
([#1166](https://github.com/googleapis/python-spanner/issues/1166)) ([c4af6f0](https://github.com/googleapis/python-spanner/commit/c4af6f09a449f293768f70a84e805ffe08c6c2fb)) + ## [3.48.0](https://github.com/googleapis/python-spanner/compare/v3.47.0...v3.48.0) (2024-07-30) diff --git a/google/cloud/spanner_admin_database_v1/gapic_version.py b/google/cloud/spanner_admin_database_v1/gapic_version.py index ebd305d0c8..66fbf6e926 100644 --- a/google/cloud/spanner_admin_database_v1/gapic_version.py +++ b/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.48.0" # {x-release-please-version} +__version__ = "3.49.0" # {x-release-please-version} diff --git a/google/cloud/spanner_admin_instance_v1/gapic_version.py b/google/cloud/spanner_admin_instance_v1/gapic_version.py index ebd305d0c8..66fbf6e926 100644 --- a/google/cloud/spanner_admin_instance_v1/gapic_version.py +++ b/google/cloud/spanner_admin_instance_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.48.0" # {x-release-please-version} +__version__ = "3.49.0" # {x-release-please-version} diff --git a/google/cloud/spanner_v1/gapic_version.py b/google/cloud/spanner_v1/gapic_version.py index ebd305d0c8..66fbf6e926 100644 --- a/google/cloud/spanner_v1/gapic_version.py +++ b/google/cloud/spanner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.48.0" # {x-release-please-version} +__version__ = "3.49.0" # {x-release-please-version} diff --git a/noxfile.py b/noxfile.py index e599d96369..8f0452d4d2 100644 --- a/noxfile.py +++ b/noxfile.py @@ -433,7 +433,7 @@ def prerelease_deps(session, protobuf_implementation, database_dialect): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing, testing]") + session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index 86a6b4fa78..94d4ebb351 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "0.1.0" + "version": "3.49.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index ac2f8c24ec..2805d839f7 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "0.1.0" + "version": "3.49.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index 4384d19e2a..f3058f4e63 
100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-cloud-spanner",
-    "version": "0.1.0"
+    "version": "3.49.0"
   },
   "snippets": [
     {
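As a quick post-release sanity check, the bumped version can be read back from
the updated gapic_version.py modules. A minimal sketch, assuming this patch
series is applied and the package is installed from the branch:

    from google.cloud.spanner_v1 import gapic_version
    from google.cloud.spanner_admin_database_v1 import gapic_version as admin_db_version

    # Both files were bumped to 3.49.0 by release-please in this patch.
    assert gapic_version.__version__ == "3.49.0"
    assert admin_db_version.__version__ == "3.49.0"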