`.
+ The long-running operation
+ [metadata][google.longrunning.Operation.metadata] field type
+ `metadata.type_url` describes the type of the metadata. Operations returned
+ include those that have completed/failed/canceled within the last 7 days,
+ and pending operations. Operations returned are ordered by
+ `operation.metadata.value.progress.start_time` in descending order starting
+ from the most recently started operation.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListDatabaseRoles(self, request, context):
+ """Lists Cloud Spanner database roles."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def CreateBackupSchedule(self, request, context):
+ """Creates a new backup schedule."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetBackupSchedule(self, request, context):
+ """Gets backup schedule for the input schedule name."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def UpdateBackupSchedule(self, request, context):
+ """Updates a backup schedule."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteBackupSchedule(self, request, context):
+ """Deletes a backup schedule."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListBackupSchedules(self, request, context):
+ """Lists all the backup schedules for the database."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_DatabaseAdminServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "ListDatabases": grpc.unary_unary_rpc_method_handler(
+ servicer.ListDatabases,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.serialize,
+ ),
+ "CreateDatabase": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateDatabase,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.deserialize,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "GetDatabase": grpc.unary_unary_rpc_method_handler(
+ servicer.GetDatabase,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.serialize,
+ ),
+ "UpdateDatabase": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateDatabase,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.deserialize,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "UpdateDatabaseDdl": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateDatabaseDdl,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.deserialize,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "DropDatabase": grpc.unary_unary_rpc_method_handler(
+ servicer.DropDatabase,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.deserialize,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "GetDatabaseDdl": grpc.unary_unary_rpc_method_handler(
+ servicer.GetDatabaseDdl,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.serialize,
+ ),
+ "SetIamPolicy": grpc.unary_unary_rpc_method_handler(
+ servicer.SetIamPolicy,
+ request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString,
+ response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
+ ),
+ "GetIamPolicy": grpc.unary_unary_rpc_method_handler(
+ servicer.GetIamPolicy,
+ request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString,
+ response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
+ ),
+ "TestIamPermissions": grpc.unary_unary_rpc_method_handler(
+ servicer.TestIamPermissions,
+ request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString,
+ response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString,
+ ),
+ "CreateBackup": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateBackup,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.deserialize,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "CopyBackup": grpc.unary_unary_rpc_method_handler(
+ servicer.CopyBackup,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.deserialize,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "GetBackup": grpc.unary_unary_rpc_method_handler(
+ servicer.GetBackup,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize,
+ ),
+ "UpdateBackup": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateBackup,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize,
+ ),
+ "DeleteBackup": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteBackup,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.deserialize,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "ListBackups": grpc.unary_unary_rpc_method_handler(
+ servicer.ListBackups,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.serialize,
+ ),
+ "RestoreDatabase": grpc.unary_unary_rpc_method_handler(
+ servicer.RestoreDatabase,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.deserialize,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "ListDatabaseOperations": grpc.unary_unary_rpc_method_handler(
+ servicer.ListDatabaseOperations,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.serialize,
+ ),
+ "ListBackupOperations": grpc.unary_unary_rpc_method_handler(
+ servicer.ListBackupOperations,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.serialize,
+ ),
+ "ListDatabaseRoles": grpc.unary_unary_rpc_method_handler(
+ servicer.ListDatabaseRoles,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.serialize,
+ ),
+ "CreateBackupSchedule": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateBackupSchedule,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize,
+ ),
+ "GetBackupSchedule": grpc.unary_unary_rpc_method_handler(
+ servicer.GetBackupSchedule,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize,
+ ),
+ "UpdateBackupSchedule": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateBackupSchedule,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize,
+ ),
+ "DeleteBackupSchedule": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteBackupSchedule,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.deserialize,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "ListBackupSchedules": grpc.unary_unary_rpc_method_handler(
+ servicer.ListBackupSchedules,
+ request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.serialize,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.spanner.admin.database.v1.DatabaseAdmin", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
+ server.add_registered_method_handlers(
+ "google.spanner.admin.database.v1.DatabaseAdmin", rpc_method_handlers
+ )
+
+
+# This class is part of an EXPERIMENTAL API.
+class DatabaseAdmin(object):
+ """Cloud Spanner Database Admin API
+
+ The Cloud Spanner Database Admin API can be used to:
+ * create, drop, and list databases
+ * update the schema of pre-existing databases
+ * create, delete, copy and list backups for a database
+ * restore a database from an existing backup
+ """
+
+ @staticmethod
+ def ListDatabases(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def CreateDatabase(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def GetDatabase(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def UpdateDatabase(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def UpdateDatabaseDdl(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def DropDatabase(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def GetDatabaseDdl(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def SetIamPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy",
+ google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
+ google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def GetIamPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy",
+ google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
+ google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def TestIamPermissions(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions",
+ google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
+ google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def CreateBackup(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def CopyBackup(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def GetBackup(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def UpdateBackup(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def DeleteBackup(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ListBackups(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def RestoreDatabase(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ListDatabaseOperations(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ListBackupOperations(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ListDatabaseRoles(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def CreateBackupSchedule(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def GetBackupSchedule(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def UpdateBackupSchedule(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def DeleteBackupSchedule(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ListBackupSchedules(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules",
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.SerializeToString,
+ google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
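
For context on how these generated testing stubs are typically consumed, here is a minimal sketch (not part of the diff; the import path and the MockDatabaseAdmin name are illustrative assumptions): a test subclasses DatabaseAdminServicer, overrides only the RPCs it needs, and registers the instance on an in-process gRPC server via add_DatabaseAdminServicer_to_server.

    from concurrent import futures

    import grpc

    # Assumed import path for the generated module shown above.
    from google.cloud.spanner_v1.testing import database_admin_pb2_grpc


    class MockDatabaseAdmin(database_admin_pb2_grpc.DatabaseAdminServicer):
        """Hypothetical test double: override only the RPCs a test exercises.

        Any method left unoverridden keeps the base behavior of failing the
        call with grpc.StatusCode.UNIMPLEMENTED.
        """


    server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
    database_admin_pb2_grpc.add_DatabaseAdminServicer_to_server(
        MockDatabaseAdmin(), server
    )
    port = server.add_insecure_port("localhost:0")  # the OS picks a free port
    server.start()
    # ... run client calls against f"localhost:{port}", then:
    server.stop(grace=None)
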
diff --git a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py
new file mode 100644
index 0000000000..c4622a6a34
--- /dev/null
+++ b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py
@@ -0,0 +1,882 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+
+# Generated with the following commands:
+#
+# pip install grpcio-tools
+# git clone git@github.com:googleapis/googleapis.git
+# cd googleapis
+# python -m grpc_tools.protoc \
+# -I . \
+# --python_out=. --pyi_out=. --grpc_python_out=. \
+# ./google/spanner/v1/*.proto
+
+"""Client and server classes corresponding to protobuf-defined services."""
+
+import grpc
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.cloud.spanner_v1.types import (
+ commit_response as google_dot_spanner_dot_v1_dot_commit__response__pb2,
+)
+from google.cloud.spanner_v1.types import (
+ result_set as google_dot_spanner_dot_v1_dot_result__set__pb2,
+)
+from google.cloud.spanner_v1.types import (
+ spanner as google_dot_spanner_dot_v1_dot_spanner__pb2,
+)
+from google.cloud.spanner_v1.types import (
+ transaction as google_dot_spanner_dot_v1_dot_transaction__pb2,
+)
+
+GRPC_GENERATED_VERSION = "1.67.0"
+GRPC_VERSION = grpc.__version__
+_version_not_supported = False
+
+try:
+ from grpc._utilities import first_version_is_lower
+
+ _version_not_supported = first_version_is_lower(
+ GRPC_VERSION, GRPC_GENERATED_VERSION
+ )
+except ImportError:
+ _version_not_supported = True
+
+if _version_not_supported:
+ raise RuntimeError(
+ f"The grpc package installed is at version {GRPC_VERSION},"
+ + " but the generated code in google/spanner/v1/spanner_pb2_grpc.py depends on"
+ + f" grpcio>={GRPC_GENERATED_VERSION}."
+ + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}"
+ + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}."
+ )
+
+
+class SpannerServicer(object):
+ """Cloud Spanner API
+
+ The Cloud Spanner API can be used to manage sessions and execute
+ transactions on data stored in Cloud Spanner databases.
+ """
+
+ def CreateSession(self, request, context):
+ """Creates a new session. A session can be used to perform
+ transactions that read and/or modify data in a Cloud Spanner database.
+ Sessions are meant to be reused for many consecutive
+ transactions.
+
+ Sessions can only execute one transaction at a time. To execute
+ multiple concurrent read-write/write-only transactions, create
+ multiple sessions. Note that standalone reads and queries use a
+ transaction internally, and count toward the one transaction
+ limit.
+
+ Active sessions use additional server resources, so it is a good idea to
+ delete idle and unneeded sessions.
+ Aside from explicit deletes, Cloud Spanner may delete sessions for which no
+ operations are sent for more than an hour. If a session is deleted,
+ requests to it return `NOT_FOUND`.
+
+ Idle sessions can be kept alive by sending a trivial SQL query
+ periodically, e.g., `"SELECT 1"`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def BatchCreateSessions(self, request, context):
+ """Creates multiple new sessions.
+
+ This API can be used to initialize a session cache on the clients.
+ See https://goo.gl/TgSFN2 for best practices on session cache management.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def GetSession(self, request, context):
+ """Gets a session. Returns `NOT_FOUND` if the session does not exist.
+ This is mainly useful for determining whether a session is still
+ alive.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ListSessions(self, request, context):
+ """Lists all sessions in a given database."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def DeleteSession(self, request, context):
+ """Ends a session, releasing server resources associated with it. This will
+ asynchronously trigger cancellation of any operations that are running with
+ this session.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ExecuteSql(self, request, context):
+ """Executes an SQL statement, returning all results in a single reply. This
+ method cannot be used to return a result set larger than 10 MiB;
+ if the query yields more data than that, the query fails with
+ a `FAILED_PRECONDITION` error.
+
+ Operations inside read-write transactions might return `ABORTED`. If
+ this occurs, the application should restart the transaction from
+ the beginning. See [Transaction][google.spanner.v1.Transaction] for more
+ details.
+
+ Larger result sets can be fetched in streaming fashion by calling
+ [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
+ instead.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ExecuteStreamingSql(self, request, context):
+ """Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except returns the
+ result set as a stream. Unlike
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no limit on
+ the size of the returned result set. However, no individual row in the
+ result set can exceed 100 MiB, and no column value can exceed 10 MiB.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def ExecuteBatchDml(self, request, context):
+ """Executes a batch of SQL DML statements. This method allows many statements
+ to be run with lower latency than submitting them sequentially with
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
+
+ Statements are executed in sequential order. A request can succeed even if
+ a statement fails. The
+ [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status]
+ field in the response provides information about the statement that failed.
+ Clients must inspect this field to determine whether an error occurred.
+
+ Execution stops after the first failed statement; the remaining statements
+ are not executed.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Read(self, request, context):
+ """Reads rows from the database using key lookups and scans, as a
+ simple key/value style alternative to
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method cannot be
+ used to return a result set larger than 10 MiB; if the read matches more
+ data than that, the read fails with a `FAILED_PRECONDITION`
+ error.
+
+ Reads inside read-write transactions might return `ABORTED`. If
+ this occurs, the application should restart the transaction from
+ the beginning. See [Transaction][google.spanner.v1.Transaction] for more
+ details.
+
+ Larger result sets can be yielded in streaming fashion by calling
+ [StreamingRead][google.spanner.v1.Spanner.StreamingRead] instead.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def StreamingRead(self, request, context):
+ """Like [Read][google.spanner.v1.Spanner.Read], except returns the result set
+ as a stream. Unlike [Read][google.spanner.v1.Spanner.Read], there is no
+ limit on the size of the returned result set. However, no individual row in
+ the result set can exceed 100 MiB, and no column value can exceed
+ 10 MiB.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def BeginTransaction(self, request, context):
+ """Begins a new transaction. This step can often be skipped:
+ [Read][google.spanner.v1.Spanner.Read],
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
+ [Commit][google.spanner.v1.Spanner.Commit] can begin a new transaction as a
+ side-effect.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Commit(self, request, context):
+ """Commits a transaction. The request includes the mutations to be
+ applied to rows in the database.
+
+ `Commit` might return an `ABORTED` error. This can occur at any time;
+ commonly, the cause is conflicts with concurrent
+ transactions. However, it can also happen for a variety of other
+ reasons. If `Commit` returns `ABORTED`, the caller should re-attempt
+ the transaction from the beginning, re-using the same session.
+
+ On very rare occasions, `Commit` might return `UNKNOWN`. This can happen,
+ for example, if the client job experiences a 1+ hour networking failure.
+ At that point, Cloud Spanner has lost track of the transaction outcome and
+ we recommend that you perform another read from the database to see the
+ state of things as they are now.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def Rollback(self, request, context):
+ """Rolls back a transaction, releasing any locks it holds. It is a good
+ idea to call this for any transaction that includes one or more
+ [Read][google.spanner.v1.Spanner.Read] or
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and ultimately
+ decides not to commit.
+
+ `Rollback` returns `OK` if it successfully aborts the transaction, the
+ transaction was already aborted, or the transaction is not
+ found. `Rollback` never returns `ABORTED`.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def PartitionQuery(self, request, context):
+ """Creates a set of partition tokens that can be used to execute a query
+ operation in parallel. Each of the returned partition tokens can be used
+ by [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] to
+ specify a subset of the query result to read. The same session and
+ read-only transaction must be used by the PartitionQueryRequest used to
+ create the partition tokens and the ExecuteSqlRequests that use the
+ partition tokens.
+
+ Partition tokens become invalid when the session used to create them
+ is deleted, is idle for too long, begins a new transaction, or becomes too
+ old. When any of these happen, it is not possible to resume the query, and
+ the whole operation must be restarted from the beginning.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def PartitionRead(self, request, context):
+ """Creates a set of partition tokens that can be used to execute a read
+ operation in parallel. Each of the returned partition tokens can be used
+ by [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to specify a
+ subset of the read result to read. The same session and read-only
+ transaction must be used both by the PartitionReadRequest that creates the
+ partition tokens and by the ReadRequests that use those partition tokens. There
+ are no ordering guarantees on rows returned among the returned partition
+ tokens, or even within each individual StreamingRead call issued with a
+ partition_token.
+
+ Partition tokens become invalid when the session used to create them
+ is deleted, is idle for too long, begins a new transaction, or becomes too
+ old. When any of these happen, it is not possible to resume the read, and
+ the whole operation must be restarted from the beginning.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+ def BatchWrite(self, request, context):
+ """Batches the supplied mutation groups in a collection of efficient
+ transactions. All mutations in a group are committed atomically. However,
+ mutations across groups can be committed non-atomically in an unspecified
+ order and thus, they must be independent of each other. Partial failure is
+ possible, i.e., some groups may have been committed successfully, while
+ some may have failed. The results of individual batches are streamed into
+ the response as the batches are applied.
+
+ BatchWrite requests are not replay protected, meaning that each mutation
+ group may be applied more than once. Replays of non-idempotent mutations
+ may have undesirable effects. For example, replays of an insert mutation
+ may produce an `ALREADY_EXISTS` error or, if you use generated or
+ commit-timestamp-based keys, may result in additional rows being added to the
+ mutation's table. We recommend structuring your mutation groups to be
+ idempotent to avoid this issue.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_SpannerServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "CreateSession": grpc.unary_unary_rpc_method_handler(
+ servicer.CreateSession,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize,
+ ),
+ "BatchCreateSessions": grpc.unary_unary_rpc_method_handler(
+ servicer.BatchCreateSessions,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.serialize,
+ ),
+ "GetSession": grpc.unary_unary_rpc_method_handler(
+ servicer.GetSession,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize,
+ ),
+ "ListSessions": grpc.unary_unary_rpc_method_handler(
+ servicer.ListSessions,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.serialize,
+ ),
+ "DeleteSession": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteSession,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.deserialize,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "ExecuteSql": grpc.unary_unary_rpc_method_handler(
+ servicer.ExecuteSql,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize,
+ ),
+ "ExecuteStreamingSql": grpc.unary_stream_rpc_method_handler(
+ servicer.ExecuteStreamingSql,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize,
+ ),
+ "ExecuteBatchDml": grpc.unary_unary_rpc_method_handler(
+ servicer.ExecuteBatchDml,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.serialize,
+ ),
+ "Read": grpc.unary_unary_rpc_method_handler(
+ servicer.Read,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize,
+ ),
+ "StreamingRead": grpc.unary_stream_rpc_method_handler(
+ servicer.StreamingRead,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize,
+ ),
+ "BeginTransaction": grpc.unary_unary_rpc_method_handler(
+ servicer.BeginTransaction,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.serialize,
+ ),
+ "Commit": grpc.unary_unary_rpc_method_handler(
+ servicer.Commit,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.serialize,
+ ),
+ "Rollback": grpc.unary_unary_rpc_method_handler(
+ servicer.Rollback,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.deserialize,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "PartitionQuery": grpc.unary_unary_rpc_method_handler(
+ servicer.PartitionQuery,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize,
+ ),
+ "PartitionRead": grpc.unary_unary_rpc_method_handler(
+ servicer.PartitionRead,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize,
+ ),
+ "BatchWrite": grpc.unary_stream_rpc_method_handler(
+ servicer.BatchWrite,
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.deserialize,
+ response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.serialize,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.spanner.v1.Spanner", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
+ server.add_registered_method_handlers(
+ "google.spanner.v1.Spanner", rpc_method_handlers
+ )
+
+
+# This class is part of an EXPERIMENTAL API.
+class Spanner(object):
+ """Cloud Spanner API
+
+ The Cloud Spanner API can be used to manage sessions and execute
+ transactions on data stored in Cloud Spanner databases.
+ """
+
+ @staticmethod
+ def CreateSession(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/CreateSession",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def BatchCreateSessions(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/BatchCreateSessions",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def GetSession(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/GetSession",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ListSessions(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/ListSessions",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def DeleteSession(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/DeleteSession",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json,
+ google_dot_protobuf_dot_empty__pb2.Empty.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ExecuteSql(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/ExecuteSql",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json,
+ google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ExecuteStreamingSql(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/ExecuteStreamingSql",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json,
+ google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def ExecuteBatchDml(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/ExecuteBatchDml",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def Read(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/Read",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json,
+ google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def StreamingRead(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/StreamingRead",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json,
+ google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def BeginTransaction(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/BeginTransaction",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json,
+ google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def Commit(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/Commit",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json,
+ google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def Rollback(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/Rollback",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json,
+ google_dot_protobuf_dot_empty__pb2.Empty.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def PartitionQuery(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/PartitionQuery",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def PartitionRead(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/PartitionRead",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
+
+ @staticmethod
+ def BatchWrite(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ "/google.spanner.v1.Spanner/BatchWrite",
+ google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.SerializeToString,
+ google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True,
+ )
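
For ad-hoc calls, the generated convenience methods above can be invoked directly, without constructing a stub. A minimal sketch, assuming the enclosing generated service class is named `Spanner` and a plaintext emulator is listening locally; the pb2 import paths, endpoint, and session name are assumptions for illustration, not taken from this diff:

    from google.spanner.v1 import spanner_pb2, transaction_pb2  # assumed pb2 paths

    request = spanner_pb2.BeginTransactionRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder name
        options=transaction_pb2.TransactionOptions(
            read_write=transaction_pb2.TransactionOptions.ReadWrite()
        ),
    )
    response = Spanner.BeginTransaction(
        request,
        target="localhost:9010",  # typical emulator port (assumption)
        insecure=True,  # plaintext channel; pass channel_credentials in production
    )
    print(response.id)  # server-assigned transaction id bytes
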
diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py
index c872cc380d..d99c4fde2f 100644
--- a/google/cloud/spanner_v1/transaction.py
+++ b/google/cloud/spanner_v1/transaction.py
@@ -98,7 +98,13 @@ def _make_txn_selector(self):
return TransactionSelector(id=self._transaction_id)
def _execute_request(
- self, method, request, trace_name=None, session=None, attributes=None
+ self,
+ method,
+ request,
+ trace_name=None,
+ session=None,
+ attributes=None,
+ observability_options=None,
):
"""Helper method to execute request after fetching transaction selector.
@@ -110,7 +116,9 @@ def _execute_request(
"""
transaction = self._make_txn_selector()
request.transaction = transaction
- with trace_call(trace_name, session, attributes):
+ with trace_call(
+ trace_name, session, attributes, observability_options=observability_options
+ ):
method = functools.partial(method, request=request)
response = _retry(
method,
@@ -147,7 +155,12 @@ def begin(self):
read_write=TransactionOptions.ReadWrite(),
exclude_txn_from_change_streams=self.exclude_txn_from_change_streams,
)
- with trace_call("CloudSpanner.BeginTransaction", self._session):
+ observability_options = getattr(database, "observability_options", None)
+ with trace_call(
+ "CloudSpanner.BeginTransaction",
+ self._session,
+ observability_options=observability_options,
+ ):
method = functools.partial(
api.begin_transaction,
session=self._session.name,
@@ -175,7 +188,12 @@ def rollback(self):
database._route_to_leader_enabled
)
)
- with trace_call("CloudSpanner.Rollback", self._session):
+ observability_options = getattr(database, "observability_options", None)
+ with trace_call(
+ "CloudSpanner.Rollback",
+ self._session,
+ observability_options=observability_options,
+ ):
method = functools.partial(
api.rollback,
session=self._session.name,
@@ -248,7 +266,13 @@ def commit(
max_commit_delay=max_commit_delay,
request_options=request_options,
)
- with trace_call("CloudSpanner.Commit", self._session, trace_attributes):
+ observability_options = getattr(database, "observability_options", None)
+ with trace_call(
+ "CloudSpanner.Commit",
+ self._session,
+ trace_attributes,
+ observability_options,
+ ):
method = functools.partial(
api.commit,
request=request,
@@ -284,7 +308,7 @@ def _make_params_pb(params, param_types):
:raises ValueError:
If ``params`` is None but ``param_types`` is not None.
"""
- if params is not None:
+ if params:
return Struct(
fields={key: _make_value_pb(value) for key, value in params.items()}
)
@@ -362,6 +386,9 @@ def execute_update(
# environment-level options
default_query_options = database._instance._client._query_options
query_options = _merge_query_options(default_query_options, query_options)
+ observability_options = getattr(
+ database._instance._client, "observability_options", None
+ )
if request_options is None:
request_options = RequestOptions()
@@ -399,6 +426,7 @@ def execute_update(
"CloudSpanner.ReadWriteTransaction",
self._session,
trace_attributes,
+ observability_options=observability_options,
)
# Setting the transaction id because the transaction begin was inlined for first rpc.
if (
@@ -415,6 +443,7 @@ def execute_update(
"CloudSpanner.ReadWriteTransaction",
self._session,
trace_attributes,
+ observability_options=observability_options,
)
return response.stats.row_count_exact
@@ -481,6 +510,7 @@ def batch_update(
_metadata_with_leader_aware_routing(database._route_to_leader_enabled)
)
api = database.spanner_api
+ observability_options = getattr(database, "observability_options", None)
seqno, self._execute_sql_count = (
self._execute_sql_count,
@@ -521,6 +551,7 @@ def batch_update(
"CloudSpanner.DMLTransaction",
self._session,
trace_attributes,
+ observability_options=observability_options,
)
# Setting the transaction id because the transaction begin was inlined for first rpc.
for result_set in response.result_sets:
@@ -538,6 +569,7 @@ def batch_update(
"CloudSpanner.DMLTransaction",
self._session,
trace_attributes,
+ observability_options=observability_options,
)
row_counts = [
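
All of the transaction.py hunks above follow one pattern: look up `observability_options` defensively with `getattr` and thread it into `trace_call`. Condensed, the shape is roughly (names from the diff; retry details elided):

    observability_options = getattr(database, "observability_options", None)
    with trace_call(
        "CloudSpanner.Commit",
        session,
        trace_attributes,
        observability_options,
    ):
        method = functools.partial(api.commit, request=request, metadata=metadata)
        response = _retry(method)  # retry wrapper as used elsewhere in this module
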
diff --git a/noxfile.py b/noxfile.py
index f5a2761d73..f32c24f1e3 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -34,6 +34,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
+DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12"
UNIT_TEST_PYTHON_VERSIONS: List[str] = [
"3.7",
"3.8",
@@ -234,6 +235,34 @@ def unit(session, protobuf_implementation):
)
+@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION)
+def mockserver(session):
+ # Install all test dependencies, then install this package in-place.
+
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ # install_unittest_dependencies(session, "-c", constraints_path)
+ standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES
+ session.install(*standard_deps, "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
+
+ # Run py.test against the mockserver tests.
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google",
+ "--cov=tests/unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "mockserver_tests"),
+ *session.posargs,
+ )
+
+
def install_systemtest_dependencies(session, *constraints):
# Use pre-release gRPC for system tests.
# Exclude version 1.52.0rc1 which has a known issue.
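
Usage note: with this session in place, the mock server tests should be runnable in isolation via `nox -s mockserver`, and because `session.posargs` is forwarded to pytest, something like `nox -s mockserver -- -k test_select1` should filter to a single test (assuming a local Python 3.12, per DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION).
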
diff --git a/owlbot.py b/owlbot.py
index c215f26946..e7fb391c2a 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -307,4 +307,49 @@ def prerelease_deps\(session, protobuf_implementation\):""",
def prerelease_deps(session, protobuf_implementation, database_dialect):""",
)
+
+mockserver_test = """
+@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION)
+def mockserver(session):
+ # Install all test dependencies, then install this package in-place.
+
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ # install_unittest_dependencies(session, "-c", constraints_path)
+ standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES
+ session.install(*standard_deps, "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
+
+ # Run py.test against the mockserver tests.
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google",
+ "--cov=tests/unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "mockserver_tests"),
+ *session.posargs,
+ )
+
+"""
+
+place_before(
+ "noxfile.py",
+ "def install_systemtest_dependencies(session, *constraints):",
+ mockserver_test,
+ escape="()_*:",
+)
+
+place_before(
+ "noxfile.py",
+ "UNIT_TEST_PYTHON_VERSIONS: List[str] = [",
+ 'DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12"',
+ escape="[]",
+)
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
index 9324f2056b..7c35814b17 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-spanner-admin-database",
- "version": "3.50.1"
+ "version": "3.51.0"
},
"snippets": [
{
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json
index 7f64769236..261a7d44f3 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-spanner-admin-instance",
- "version": "3.50.1"
+ "version": "3.51.0"
},
"snippets": [
{
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
index 431109d19e..ddb4419273 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-spanner",
- "version": "3.50.1"
+ "version": "3.51.0"
},
"snippets": [
{
diff --git a/samples/samples/requirements.txt b/samples/samples/requirements.txt
index 5a108d39ef..4009a0a00b 100644
--- a/samples/samples/requirements.txt
+++ b/samples/samples/requirements.txt
@@ -1,2 +1,2 @@
-google-cloud-spanner==3.49.1
+google-cloud-spanner==3.50.0
futures==3.4.0; python_version < "3"
diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
index c958a66822..6650ebe88d 100644
--- a/samples/samples/snippets.py
+++ b/samples/samples/snippets.py
@@ -3222,6 +3222,57 @@ def create_instance_with_autoscaling_config(instance_id):
# [END spanner_create_instance_with_autoscaling_config]
+# [START spanner_create_instance_without_default_backup_schedule]
+def create_instance_without_default_backup_schedules(instance_id):
+ spanner_client = spanner.Client()
+ config_name = "{}/instanceConfigs/regional-me-central2".format(
+ spanner_client.project_name
+ )
+
+ operation = spanner_client.instance_admin_api.create_instance(
+ parent=spanner_client.project_name,
+ instance_id=instance_id,
+ instance=spanner_instance_admin.Instance(
+ config=config_name,
+ display_name="This is a display name.",
+ node_count=1,
+ default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, # Optional
+ ),
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(OPERATION_TIMEOUT_SECONDS)
+
+ print("Created instance {} without default backup schedules".format(instance_id))
+
+
+# [END spanner_create_instance_without_default_backup_schedule]
+
+
+# [START spanner_update_instance_default_backup_schedule_type]
+def update_instance_default_backup_schedule_type(instance_id):
+ spanner_client = spanner.Client()
+
+ name = "{}/instances/{}".format(spanner_client.project_name, instance_id)
+
+ operation = spanner_client.instance_admin_api.update_instance(
+ instance=spanner_instance_admin.Instance(
+ name=name,
+ default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.AUTOMATIC, # Optional
+ ),
+ field_mask=field_mask_pb2.FieldMask(
+ paths=["default_backup_schedule_type"]
+ ),
+ )
+
+ print("Waiting for operation to complete...")
+ operation.result(OPERATION_TIMEOUT_SECONDS)
+
+ print("Updated instance {} to have default backup schedules".format(instance_id))
+
+
+# [END spanner_update_instance_default_backup_schedule_type]
+
+
def add_proto_type_columns(instance_id, database_id):
# [START spanner_add_proto_type_columns]
# instance_id = "your-spanner-instance"
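
A hedged usage sketch for the two new snippets; the instance id is a placeholder and both calls assume application-default credentials:

    # Create an instance with default backup schedules disabled, then switch
    # the instance to AUTOMATIC default backup schedules.
    create_instance_without_default_backup_schedules("my-instance")  # placeholder id
    update_instance_default_backup_schedule_type("my-instance")
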
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py
index ba3c0bbfe7..87fa7a43a2 100644
--- a/samples/samples/snippets_test.py
+++ b/samples/samples/snippets_test.py
@@ -197,6 +197,25 @@ def test_create_instance_with_autoscaling_config(capsys, lci_instance_id):
retry_429(instance.delete)()
+def test_create_and_update_instance_default_backup_schedule_type(capsys, lci_instance_id):
+ retry_429(snippets.create_instance_without_default_backup_schedules)(
+ lci_instance_id,
+ )
+ create_out, _ = capsys.readouterr()
+ assert lci_instance_id in create_out
+ assert "without default backup schedules" in create_out
+
+ retry_429(snippets.update_instance_default_backup_schedule_type)(
+ lci_instance_id,
+ )
+ update_out, _ = capsys.readouterr()
+ assert lci_instance_id in update_out
+ assert "to have default backup schedules" in update_out
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(lci_instance_id)
+ retry_429(instance.delete)()
+
+
def test_create_instance_partition(capsys, instance_partition_instance_id):
# Unable to use create_instance since it has editions set where partitions are unsupported.
# The minimal requirement for editions is ENTERPRISE_PLUS for partitions to be supported.
diff --git a/tests/mockserver_tests/__init__.py b/tests/mockserver_tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/mockserver_tests/mock_server_test_base.py b/tests/mockserver_tests/mock_server_test_base.py
new file mode 100644
index 0000000000..1cd7656297
--- /dev/null
+++ b/tests/mockserver_tests/mock_server_test_base.py
@@ -0,0 +1,139 @@
+# Copyright 2024 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from google.cloud.spanner_dbapi.parsed_statement import AutocommitDmlMode
+from google.cloud.spanner_v1.testing.mock_database_admin import DatabaseAdminServicer
+from google.cloud.spanner_v1.testing.mock_spanner import (
+ start_mock_server,
+ SpannerServicer,
+)
+import google.cloud.spanner_v1.types.type as spanner_type
+import google.cloud.spanner_v1.types.result_set as result_set
+from google.api_core.client_options import ClientOptions
+from google.auth.credentials import AnonymousCredentials
+from google.cloud.spanner_v1 import Client, TypeCode, FixedSizePool
+from google.cloud.spanner_v1.database import Database
+from google.cloud.spanner_v1.instance import Instance
+import grpc
+
+
+def add_result(sql: str, result: result_set.ResultSet):
+ MockServerTestBase.spanner_service.mock_spanner.add_result(sql, result)
+
+
+def add_update_count(
+ sql: str, count: int, dml_mode: AutocommitDmlMode = AutocommitDmlMode.TRANSACTIONAL
+):
+ if dml_mode == AutocommitDmlMode.PARTITIONED_NON_ATOMIC:
+ stats = dict(row_count_lower_bound=count)
+ else:
+ stats = dict(row_count_exact=count)
+ result = result_set.ResultSet(dict(stats=result_set.ResultSetStats(stats)))
+ add_result(sql, result)
+
+
+def add_select1_result():
+ add_single_result("select 1", "c", TypeCode.INT64, [("1",)])
+
+
+def add_single_result(
+ sql: str, column_name: str, type_code: spanner_type.TypeCode, row
+):
+ result = result_set.ResultSet(
+ dict(
+ metadata=result_set.ResultSetMetadata(
+ dict(
+ row_type=spanner_type.StructType(
+ dict(
+ fields=[
+ spanner_type.StructType.Field(
+ dict(
+ name=column_name,
+ type=spanner_type.Type(dict(code=type_code)),
+ )
+ )
+ ]
+ )
+ )
+ )
+ ),
+ )
+ )
+ result.rows.extend(row)
+ MockServerTestBase.spanner_service.mock_spanner.add_result(sql, result)
+
+
+class MockServerTestBase(unittest.TestCase):
+ server: grpc.Server = None
+ spanner_service: SpannerServicer = None
+ database_admin_service: DatabaseAdminServicer = None
+ port: int = None
+
+ def __init__(self, *args, **kwargs):
+ super(MockServerTestBase, self).__init__(*args, **kwargs)
+ self._client = None
+ self._instance = None
+ self._database = None
+
+ @classmethod
+ def setup_class(cls):
+ (
+ MockServerTestBase.server,
+ MockServerTestBase.spanner_service,
+ MockServerTestBase.database_admin_service,
+ MockServerTestBase.port,
+ ) = start_mock_server()
+
+ @classmethod
+ def teardown_class(cls):
+ if MockServerTestBase.server is not None:
+ MockServerTestBase.server.stop(grace=None)
+ MockServerTestBase.server = None
+
+ def setup_method(self, *args, **kwargs):
+ self._client = None
+ self._instance = None
+ self._database = None
+
+ def teardown_method(self, *args, **kwargs):
+ MockServerTestBase.spanner_service.clear_requests()
+ MockServerTestBase.database_admin_service.clear_requests()
+
+ @property
+ def client(self) -> Client:
+ if self._client is None:
+ self._client = Client(
+ project="p",
+ credentials=AnonymousCredentials(),
+ client_options=ClientOptions(
+ api_endpoint="localhost:" + str(MockServerTestBase.port),
+ ),
+ )
+ return self._client
+
+ @property
+ def instance(self) -> Instance:
+ if self._instance is None:
+ self._instance = self.client.instance("test-instance")
+ return self._instance
+
+ @property
+ def database(self) -> Database:
+ if self._database is None:
+ self._database = self.instance.database(
+ "test-database", pool=FixedSizePool(size=10)
+ )
+ return self._database
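
A minimal sketch of a test built on this base class (the SQL text, column name, and expected value are arbitrary; the result is served by the in-process mock server started in `setup_class`):

    from google.cloud.spanner_v1 import TypeCode
    from tests.mockserver_tests.mock_server_test_base import (
        MockServerTestBase,
        add_single_result,
    )


    class TestGreeting(MockServerTestBase):
        def test_select_greeting(self):
            # Register the result the mock server should return for this query.
            add_single_result(
                "select 'Hello'", "greeting", TypeCode.STRING, [("Hello",)]
            )
            with self.database.snapshot() as snapshot:
                rows = list(snapshot.execute_sql("select 'Hello'"))
            self.assertEqual("Hello", rows[0][0])
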
diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py
new file mode 100644
index 0000000000..ed0906cb9b
--- /dev/null
+++ b/tests/mockserver_tests/test_basics.py
@@ -0,0 +1,87 @@
+# Copyright 2024 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+from google.cloud.spanner_dbapi import Connection
+from google.cloud.spanner_dbapi.parsed_statement import AutocommitDmlMode
+from google.cloud.spanner_v1 import (
+ BatchCreateSessionsRequest,
+ ExecuteSqlRequest,
+ BeginTransactionRequest,
+ TransactionOptions,
+)
+
+from tests.mockserver_tests.mock_server_test_base import (
+ MockServerTestBase,
+ add_select1_result,
+ add_update_count,
+)
+
+
+class TestBasics(MockServerTestBase):
+ def test_select1(self):
+ add_select1_result()
+ with self.database.snapshot() as snapshot:
+ results = snapshot.execute_sql("select 1")
+ result_list = []
+ for row in results:
+ result_list.append(row)
+ self.assertEqual(1, row[0])
+ self.assertEqual(1, len(result_list))
+ requests = self.spanner_service.requests
+ self.assertEqual(2, len(requests), msg=requests)
+ self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest))
+ self.assertTrue(isinstance(requests[1], ExecuteSqlRequest))
+
+ def test_create_table(self):
+ database_admin_api = self.client.database_admin_api
+ request = spanner_database_admin.UpdateDatabaseDdlRequest(
+ dict(
+ database=database_admin_api.database_path(
+ "test-project", "test-instance", "test-database"
+ ),
+ statements=[
+ "CREATE TABLE Test ("
+ "Id INT64, "
+ "Value STRING(MAX)) "
+ "PRIMARY KEY (Id)",
+ ],
+ )
+ )
+ operation = database_admin_api.update_database_ddl(request)
+ operation.result(1)
+
+ # TODO: Move this to a separate class once the mock server test setup has
+ # been refactored to use a base class for the boilerplate code.
+ def test_dbapi_partitioned_dml(self):
+ sql = "UPDATE singers SET foo='bar' WHERE active = true"
+ add_update_count(sql, 100, AutocommitDmlMode.PARTITIONED_NON_ATOMIC)
+ connection = Connection(self.instance, self.database)
+ connection.autocommit = True
+ connection.set_autocommit_dml_mode(AutocommitDmlMode.PARTITIONED_NON_ATOMIC)
+ with connection.cursor() as cursor:
+ # Note: SQLAlchemy uses [] as the list of parameters for statements
+ # with no parameters.
+ cursor.execute(sql, [])
+ self.assertEqual(100, cursor.rowcount)
+
+ requests = self.spanner_service.requests
+ self.assertEqual(3, len(requests), msg=requests)
+ self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest))
+ self.assertTrue(isinstance(requests[1], BeginTransactionRequest))
+ self.assertTrue(isinstance(requests[2], ExecuteSqlRequest))
+ begin_request: BeginTransactionRequest = requests[1]
+ self.assertEqual(
+ TransactionOptions(dict(partitioned_dml={})), begin_request.options
+ )
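
Outside a test, the same partitioned-DML path through dbapi would look roughly like this (a sketch mirroring `test_dbapi_partitioned_dml` above; `instance` and `database` are assumed to already exist):

    from google.cloud.spanner_dbapi import Connection
    from google.cloud.spanner_dbapi.parsed_statement import AutocommitDmlMode

    connection = Connection(instance, database)  # assumed handles
    connection.autocommit = True
    connection.set_autocommit_dml_mode(AutocommitDmlMode.PARTITIONED_NON_ATOMIC)
    with connection.cursor() as cursor:
        cursor.execute("UPDATE singers SET foo='bar' WHERE active = true", [])
        # For partitioned DML the count is a lower bound, not an exact count.
        print(cursor.rowcount)
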
diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py
index feb580d903..a98f100bcc 100644
--- a/tests/system/test_dbapi.py
+++ b/tests/system/test_dbapi.py
@@ -11,11 +11,13 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
+import base64
import datetime
from collections import defaultdict
+
import pytest
import time
+import decimal
from google.cloud import spanner_v1
from google.cloud._helpers import UTC
@@ -50,7 +52,22 @@
SQL SECURITY INVOKER
AS
SELECT c.email
- FROM contacts AS c;"""
+ FROM contacts AS c;
+
+ CREATE TABLE all_types (
+ id int64,
+ col_bool bool,
+ col_bytes bytes(max),
+ col_date date,
+ col_float32 float32,
+ col_float64 float64,
+ col_int64 int64,
+ col_json json,
+ col_numeric numeric,
+ col_string string(max),
+ col_timestamp timestamp,
+ ) primary key (col_int64);
+ """
DDL_STATEMENTS = [stmt.strip() for stmt in DDL.split(";") if stmt.strip()]
@@ -1602,3 +1619,29 @@ def test_list_tables(self, include_views):
def test_invalid_statement_error(self):
with pytest.raises(ProgrammingError):
self._cursor.execute("-- comment only")
+
+ def test_insert_all_types(self):
+ """Test inserting all supported data types"""
+
+ self._conn.autocommit = True
+ self._cursor.execute(
+ """
+ INSERT INTO all_types (id, col_bool, col_bytes, col_date, col_float32, col_float64,
+ col_int64, col_json, col_numeric, col_string, col_timestamp)
+ VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+ """,
+ (
+ 1,
+ True,
+ base64.b64encode(b"test-bytes"),
+ datetime.date(2024, 12, 3),
+ 3.14,
+ 3.14,
+ 123,
+ JsonObject({"key": "value"}),
+ decimal.Decimal("3.14"),
+ "test-string",
+ datetime.datetime(2024, 12, 3, 17, 30, 14),
+ ),
+ )
+ assert self._cursor.rowcount == 1
diff --git a/tests/system/test_observability_options.py b/tests/system/test_observability_options.py
new file mode 100644
index 0000000000..8382255c15
--- /dev/null
+++ b/tests/system/test_observability_options.py
@@ -0,0 +1,134 @@
+# Copyright 2024 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from . import _helpers
+from google.cloud.spanner_v1 import Client
+
+HAS_OTEL_INSTALLED = False
+
+try:
+ from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+ from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
+ InMemorySpanExporter,
+ )
+ from opentelemetry.sdk.trace import TracerProvider
+ from opentelemetry.sdk.trace.sampling import ALWAYS_ON
+ from opentelemetry import trace
+
+ HAS_OTEL_INSTALLED = True
+except ImportError:
+ pass
+
+
+@pytest.mark.skipif(
+ not HAS_OTEL_INSTALLED, reason="OpenTelemetry is necessary to test traces."
+)
+@pytest.mark.skipif(
+ not _helpers.USE_EMULATOR, reason="Emulator is necessary to test traces."
+)
+def test_observability_options_propagation():
+ PROJECT = _helpers.EMULATOR_PROJECT
+ CONFIGURATION_NAME = "config-name"
+ INSTANCE_ID = _helpers.INSTANCE_ID
+ DISPLAY_NAME = "display-name"
+ DATABASE_ID = _helpers.unique_id("temp_db")
+ NODE_COUNT = 5
+ LABELS = {"test": "true"}
+
+ def test_propagation(enable_extended_tracing):
+ global_tracer_provider = TracerProvider(sampler=ALWAYS_ON)
+ trace.set_tracer_provider(global_tracer_provider)
+ global_trace_exporter = InMemorySpanExporter()
+ global_tracer_provider.add_span_processor(
+ SimpleSpanProcessor(global_trace_exporter)
+ )
+
+ inject_tracer_provider = TracerProvider(sampler=ALWAYS_ON)
+ inject_trace_exporter = InMemorySpanExporter()
+ inject_tracer_provider.add_span_processor(
+ SimpleSpanProcessor(inject_trace_exporter)
+ )
+ observability_options = dict(
+ tracer_provider=inject_tracer_provider,
+ enable_extended_tracing=enable_extended_tracing,
+ )
+ client = Client(
+ project=PROJECT,
+ observability_options=observability_options,
+ credentials=_make_credentials(),
+ )
+
+ instance = client.instance(
+ INSTANCE_ID,
+ CONFIGURATION_NAME,
+ display_name=DISPLAY_NAME,
+ node_count=NODE_COUNT,
+ labels=LABELS,
+ )
+
+ try:
+ instance.create()
+ except Exception:
+ pass
+
+ db = instance.database(DATABASE_ID)
+ try:
+ db.create()
+ except Exception:
+ pass
+
+ assert db.observability_options == observability_options
+ with db.snapshot() as snapshot:
+ res = snapshot.execute_sql("SELECT 1")
+ for val in res:
+ _ = val
+
+ from_global_spans = global_trace_exporter.get_finished_spans()
+ from_inject_spans = inject_trace_exporter.get_finished_spans()
+ assert (
+ len(from_global_spans) == 0
+ ) # "Expecting no spans from the global trace exporter"
+ assert (
+ len(from_inject_spans) >= 2
+ ) # "Expecting at least 2 spans from the injected trace exporter"
+ gotNames = [span.name for span in from_inject_spans]
+ wantNames = ["CloudSpanner.CreateSession", "CloudSpanner.ReadWriteTransaction"]
+ assert gotNames == wantNames
+
+ # Check for conformance of enable_extended_tracing
+ lastSpan = from_inject_spans[len(from_inject_spans) - 1]
+ wantAnnotatedSQL = "SELECT 1"
+ if not enable_extended_tracing:
+ wantAnnotatedSQL = None
+ assert (
+ lastSpan.attributes.get("db.statement", None) == wantAnnotatedSQL
+ ) # "Mismatch in annotated sql"
+
+ try:
+ db.delete()
+ instance.delete()
+ except Exception:
+ pass
+
+ # Test the respective options for enable_extended_tracing
+ test_propagation(True)
+ test_propagation(False)
+
+
+def _make_credentials():
+ from google.auth.credentials import AnonymousCredentials
+
+ return AnonymousCredentials()
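
For reference, wiring a custom tracer provider into a client uses the same option names the test exercises; a minimal sketch (the project id is a placeholder):

    from opentelemetry.sdk.trace import TracerProvider
    from google.cloud.spanner_v1 import Client

    tracer_provider = TracerProvider()
    client = Client(
        project="my-project",  # placeholder
        observability_options=dict(
            tracer_provider=tracer_provider,
            enable_extended_tracing=True,  # include SQL text in span attributes
        ),
    )
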
diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py
index 5322527d12..b7337cb258 100644
--- a/tests/system/test_session_api.py
+++ b/tests/system/test_session_api.py
@@ -2018,17 +2018,20 @@ def test_execute_sql_w_manual_consume(sessions_database):
row_count = 3000
committed = _set_up_table(sessions_database, row_count)
- with sessions_database.snapshot(read_timestamp=committed) as snapshot:
- streamed = snapshot.execute_sql(sd.SQL)
+ for lazy_decode in [False, True]:
+ with sessions_database.snapshot(read_timestamp=committed) as snapshot:
+ streamed = snapshot.execute_sql(sd.SQL, lazy_decode=lazy_decode)
- keyset = spanner_v1.KeySet(all_=True)
+ keyset = spanner_v1.KeySet(all_=True)
- with sessions_database.snapshot(read_timestamp=committed) as snapshot:
- rows = list(snapshot.read(sd.TABLE, sd.COLUMNS, keyset))
+ with sessions_database.snapshot(read_timestamp=committed) as snapshot:
+ rows = list(
+ snapshot.read(sd.TABLE, sd.COLUMNS, keyset, lazy_decode=lazy_decode)
+ )
- assert list(streamed) == rows
- assert streamed._current_row == []
- assert streamed._pending_chunk is None
+ assert list(streamed) == rows
+ assert streamed._current_row == []
+ assert streamed._pending_chunk is None
def test_execute_sql_w_to_dict_list(sessions_database):
@@ -2057,16 +2060,23 @@ def _check_sql_results(
if order and "ORDER" not in sql:
sql += " ORDER BY pkey"
- with database.snapshot() as snapshot:
- rows = list(
- snapshot.execute_sql(
- sql, params=params, param_types=param_types, column_info=column_info
+ for lazy_decode in [False, True]:
+ with database.snapshot() as snapshot:
+ iterator = snapshot.execute_sql(
+ sql,
+ params=params,
+ param_types=param_types,
+ column_info=column_info,
+ lazy_decode=lazy_decode,
)
- )
+ rows = list(iterator)
+ if lazy_decode:
+ for index, row in enumerate(rows):
+ rows[index] = iterator.decode_row(row)
- _sample_data._check_rows_data(
- rows, expected=expected, recurse_into_lists=recurse_into_lists
- )
+ _sample_data._check_rows_data(
+ rows, expected=expected, recurse_into_lists=recurse_into_lists
+ )
def test_multiuse_snapshot_execute_sql_isolation_strong(sessions_database):
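
The `lazy_decode` flow these hunks add defers column decoding until the caller asks for it; a condensed sketch of the pattern the updated tests use:

    with database.snapshot() as snapshot:
        iterator = snapshot.execute_sql("SELECT * FROM my_table", lazy_decode=True)
        # Rows are yielded undecoded; decode_row converts one row on demand.
        rows = [iterator.decode_row(row) for row in iterator]
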
diff --git a/tests/unit/spanner_dbapi/test_connection.py b/tests/unit/spanner_dbapi/test_connection.py
index d0fa521f8f..4bee9e93c7 100644
--- a/tests/unit/spanner_dbapi/test_connection.py
+++ b/tests/unit/spanner_dbapi/test_connection.py
@@ -138,6 +138,10 @@ def test_read_only_connection(self):
):
connection.read_only = False
+ # Verify that we can set the value to the same value as it already has.
+ connection.read_only = True
+ self.assertTrue(connection.read_only)
+
connection._spanner_transaction_started = False
connection.read_only = False
self.assertFalse(connection.read_only)
@@ -300,6 +304,19 @@ def test_commit_in_autocommit_mode(self, mock_warn):
CLIENT_TRANSACTION_NOT_STARTED_WARNING, UserWarning, stacklevel=2
)
+ @mock.patch.object(warnings, "warn")
+ def test_commit_in_autocommit_mode_with_ignore_warnings(self, mock_warn):
+ conn = self._make_connection(
+ DatabaseDialect.DATABASE_DIALECT_UNSPECIFIED,
+ ignore_transaction_warnings=True,
+ )
+ assert conn._ignore_transaction_warnings
+ conn._autocommit = True
+
+ conn.commit()
+
+ assert not mock_warn.called
+
def test_commit_database_error(self):
from google.cloud.spanner_dbapi import Connection
@@ -652,6 +669,20 @@ def test_staleness_inside_transaction(self):
with self.assertRaises(ValueError):
connection.staleness = {"read_timestamp": datetime.datetime(2021, 9, 21)}
+ def test_staleness_inside_transaction_same_value(self):
+ """
+ Verify that setting `staleness` to the same value in a transaction is allowed.
+ """
+ connection = self._make_connection()
+ connection.staleness = {"read_timestamp": datetime.datetime(2021, 9, 21)}
+ connection._spanner_transaction_started = True
+ connection._transaction = mock.Mock()
+
+ connection.staleness = {"read_timestamp": datetime.datetime(2021, 9, 21)}
+ self.assertEqual(
+ connection.staleness, {"read_timestamp": datetime.datetime(2021, 9, 21)}
+ )
+
def test_staleness_multi_use(self):
"""
Check that `staleness` option is correctly
diff --git a/tests/unit/spanner_dbapi/test_parse_utils.py b/tests/unit/spanner_dbapi/test_parse_utils.py
index 3a325014fa..4b1c7cdb06 100644
--- a/tests/unit/spanner_dbapi/test_parse_utils.py
+++ b/tests/unit/spanner_dbapi/test_parse_utils.py
@@ -218,6 +218,8 @@ def test_get_param_types(self):
params = {
"a1": 10,
"b1": "string",
+ # Note: We only want a value and not a type for this.
+ # Instead, we let Spanner infer the correct type (FLOAT64 or FLOAT32)
"c1": 10.39,
"d1": TimestampStr("2005-08-30T01:01:01.000001Z"),
"e1": DateStr("2019-12-05"),
@@ -232,7 +234,6 @@ def test_get_param_types(self):
want_types = {
"a1": param_types.INT64,
"b1": param_types.STRING,
- "c1": param_types.FLOAT64,
"d1": param_types.TIMESTAMP,
"e1": param_types.DATE,
"f1": param_types.BOOL,
diff --git a/tests/unit/test_pool.py b/tests/unit/test_pool.py
index 23ed3e7251..2e3b46fa73 100644
--- a/tests/unit/test_pool.py
+++ b/tests/unit/test_pool.py
@@ -15,6 +15,7 @@
from functools import total_ordering
import unittest
+from datetime import datetime, timedelta
import mock
@@ -184,13 +185,30 @@ def test_bind(self):
for session in SESSIONS:
session.create.assert_not_called()
- def test_get_non_expired(self):
+ def test_get_active(self):
pool = self._make_one(size=4)
database = _Database("name")
SESSIONS = sorted([_Session(database) for i in range(0, 4)])
database._sessions.extend(SESSIONS)
pool.bind(database)
+ # check if sessions returned in LIFO order
+ for i in (3, 2, 1, 0):
+ session = pool.get()
+ self.assertIs(session, SESSIONS[i])
+ self.assertFalse(session._exists_checked)
+ self.assertFalse(pool._sessions.full())
+
+ def test_get_non_expired(self):
+ pool = self._make_one(size=4)
+ database = _Database("name")
+ last_use_time = datetime.utcnow() - timedelta(minutes=56)
+ SESSIONS = sorted(
+ [_Session(database, last_use_time=last_use_time) for i in range(0, 4)]
+ )
+ database._sessions.extend(SESSIONS)
+ pool.bind(database)
+
# check if sessions returned in LIFO order
for i in (3, 2, 1, 0):
session = pool.get()
@@ -201,7 +219,8 @@ def test_get_non_expired(self):
def test_get_expired(self):
pool = self._make_one(size=4)
database = _Database("name")
- SESSIONS = [_Session(database)] * 5
+ last_use_time = datetime.utcnow() - timedelta(minutes=65)
+ SESSIONS = [_Session(database, last_use_time=last_use_time)] * 5
SESSIONS[0]._exists = False
database._sessions.extend(SESSIONS)
pool.bind(database)
@@ -915,7 +934,9 @@ def _make_transaction(*args, **kw):
class _Session(object):
_transaction = None
- def __init__(self, database, exists=True, transaction=None):
+ def __init__(
+ self, database, exists=True, transaction=None, last_use_time=datetime.utcnow()
+ ):
self._database = database
self._exists = exists
self._exists_checked = False
@@ -923,10 +944,15 @@ def __init__(self, database, exists=True, transaction=None):
self.create = mock.Mock()
self._deleted = False
self._transaction = transaction
+ self._last_use_time = last_use_time
def __lt__(self, other):
return id(self) < id(other)
+ @property
+ def last_use_time(self):
+ return self._last_use_time
+
def exists(self):
self._exists_checked = True
return self._exists
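
The new `last_use_time` plumbing lets the pool decide whether a checked-out session is fresh enough to reuse. The tests bracket the expiry boundary (56 minutes idle passes without an existence check, 65 minutes triggers one); a hedged sketch of that check, with the threshold value assumed:

    from datetime import datetime, timedelta

    def needs_existence_check(session, threshold=timedelta(minutes=60)):  # assumed value
        """Return True when a pooled session has idled past the expiry boundary."""
        return datetime.utcnow() - session.last_use_time > threshold
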