From 03400c40f1c1cc73e51733f2a28910a8dd78e7d9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?=
Date: Wed, 2 Apr 2025 14:55:55 +0200
Subject: [PATCH 1/8] feat: support transaction isolation level in dbapi
 (#1327)

Adds API arguments and functions for setting a default isolation level
and an isolation level per transaction. Support for specifying the
isolation level using SQL commands will be added in a follow-up PR.
---
 google/cloud/spanner_dbapi/connection.py |  40 +++++-
 .../test_dbapi_isolation_level.py        | 119 ++++++++++++++++++
 2 files changed, 157 insertions(+), 2 deletions(-)
 create mode 100644 tests/mockserver_tests/test_dbapi_isolation_level.py

diff --git a/google/cloud/spanner_dbapi/connection.py b/google/cloud/spanner_dbapi/connection.py
index c2aa385d2a..adcb9e97eb 100644
--- a/google/cloud/spanner_dbapi/connection.py
+++ b/google/cloud/spanner_dbapi/connection.py
@@ -29,7 +29,7 @@
 from google.cloud.spanner_dbapi.parsed_statement import ParsedStatement, Statement
 from google.cloud.spanner_dbapi.transaction_helper import TransactionRetryHelper
 from google.cloud.spanner_dbapi.cursor import Cursor
-from google.cloud.spanner_v1 import RequestOptions
+from google.cloud.spanner_v1 import RequestOptions, TransactionOptions
 from google.cloud.spanner_v1.snapshot import Snapshot
 
 from google.cloud.spanner_dbapi.exceptions import (
@@ -112,6 +112,7 @@ def __init__(self, instance, database=None, read_only=False, **kwargs):
         self._staleness = None
         self.request_priority = None
         self._transaction_begin_marked = False
+        self._transaction_isolation_level = None
         # whether transaction started at Spanner. This means that we had
         # made at least one call to Spanner.
         self._spanner_transaction_started = False
@@ -283,6 +284,33 @@ def transaction_tag(self, value):
         """
         self._connection_variables["transaction_tag"] = value
 
+    @property
+    def isolation_level(self):
+        """The default isolation level that is used for all read/write
+        transactions on this `Connection`.
+
+        Returns:
+            google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel:
+                The isolation level that is used for read/write transactions on
+                this `Connection`.
+        """
+        return self._connection_variables.get(
+            "isolation_level",
+            TransactionOptions.IsolationLevel.ISOLATION_LEVEL_UNSPECIFIED,
+        )
+
+    @isolation_level.setter
+    def isolation_level(self, value: TransactionOptions.IsolationLevel):
+        """Sets the isolation level that is used for all read/write
+        transactions on this `Connection`.
+
+        Args:
+            value (google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel):
+                The isolation level for all read/write transactions on this
+                `Connection`.
+        """
+        self._connection_variables["isolation_level"] = value
+
     @property
     def staleness(self):
         """Current read staleness option value of this `Connection`.
@@ -363,6 +391,12 @@ def transaction_checkout(self):
         if not self._spanner_transaction_started:
             self._transaction = self._session_checkout().transaction()
             self._transaction.transaction_tag = self.transaction_tag
+            if self._transaction_isolation_level:
+                self._transaction.isolation_level = (
+                    self._transaction_isolation_level
+                )
+            else:
+                self._transaction.isolation_level = self.isolation_level
             self.transaction_tag = None
             self._snapshot = None
             self._spanner_transaction_started = True
@@ -405,7 +439,7 @@ def close(self):
             self.is_closed = True
 
     @check_not_closed
-    def begin(self):
+    def begin(self, isolation_level=None):
         """
         Marks the transaction as started.
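For context, a minimal usage sketch of the API this patch introduces. The instance and database IDs are placeholders; `connect()` is the existing entry point of the `google.cloud.spanner_dbapi` package:

```python
from google.cloud.spanner_dbapi import connect
from google.cloud.spanner_v1 import TransactionOptions

connection = connect("my-instance", "my-database")  # placeholder IDs

# Default isolation level for all read/write transactions on this connection.
connection.isolation_level = TransactionOptions.IsolationLevel.REPEATABLE_READ

# Per-transaction override via the new `begin()` argument.
connection.begin(isolation_level=TransactionOptions.IsolationLevel.SERIALIZABLE)
with connection.cursor() as cursor:
    cursor.execute("insert into singers (id, name) values (1, 'Some Singer')")
connection.commit()
```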
@@ -421,6 +455,7 @@ def begin(self):
                 "is already running"
             )
         self._transaction_begin_marked = True
+        self._transaction_isolation_level = isolation_level
 
     def commit(self):
         """Commits any pending transaction to the database.
@@ -465,6 +500,7 @@ def _reset_post_commit_or_rollback(self):
         self._release_session()
         self._transaction_helper.reset()
         self._transaction_begin_marked = False
+        self._transaction_isolation_level = None
         self._spanner_transaction_started = False
 
     @check_not_closed
diff --git a/tests/mockserver_tests/test_dbapi_isolation_level.py b/tests/mockserver_tests/test_dbapi_isolation_level.py
new file mode 100644
index 0000000000..e2b6ddbb46
--- /dev/null
+++ b/tests/mockserver_tests/test_dbapi_isolation_level.py
@@ -0,0 +1,119 @@
+# Copyright 2025 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud.spanner_dbapi import Connection
+from google.cloud.spanner_v1 import (
+    BeginTransactionRequest,
+    TransactionOptions,
+)
+from tests.mockserver_tests.mock_server_test_base import (
+    MockServerTestBase,
+    add_update_count,
+)
+
+
+class TestDbapiIsolationLevel(MockServerTestBase):
+    @classmethod
+    def setup_class(cls):
+        super().setup_class()
+        add_update_count("insert into singers (id, name) values (1, 'Some Singer')", 1)
+
+    def test_isolation_level_default(self):
+        connection = Connection(self.instance, self.database)
+        with connection.cursor() as cursor:
+            cursor.execute("insert into singers (id, name) values (1, 'Some Singer')")
+            self.assertEqual(1, cursor.rowcount)
+        connection.commit()
+        begin_requests = list(
+            filter(
+                lambda msg: isinstance(msg, BeginTransactionRequest),
+                self.spanner_service.requests,
+            )
+        )
+        self.assertEqual(1, len(begin_requests))
+        self.assertEqual(
+            begin_requests[0].options.isolation_level,
+            TransactionOptions.IsolationLevel.ISOLATION_LEVEL_UNSPECIFIED,
+        )
+
+    def test_custom_isolation_level(self):
+        connection = Connection(self.instance, self.database)
+        for level in [
+            TransactionOptions.IsolationLevel.ISOLATION_LEVEL_UNSPECIFIED,
+            TransactionOptions.IsolationLevel.REPEATABLE_READ,
+            TransactionOptions.IsolationLevel.SERIALIZABLE,
+        ]:
+            connection.isolation_level = level
+            with connection.cursor() as cursor:
+                cursor.execute(
+                    "insert into singers (id, name) values (1, 'Some Singer')"
+                )
+                self.assertEqual(1, cursor.rowcount)
+            connection.commit()
+            begin_requests = list(
+                filter(
+                    lambda msg: isinstance(msg, BeginTransactionRequest),
+                    self.spanner_service.requests,
+                )
+            )
+            self.assertEqual(1, len(begin_requests))
+            self.assertEqual(begin_requests[0].options.isolation_level, level)
+            MockServerTestBase.spanner_service.clear_requests()
+
+    def test_isolation_level_in_connection_kwargs(self):
+        for level in [
+            TransactionOptions.IsolationLevel.ISOLATION_LEVEL_UNSPECIFIED,
+            TransactionOptions.IsolationLevel.REPEATABLE_READ,
+            TransactionOptions.IsolationLevel.SERIALIZABLE,
+        ]:
+            connection = Connection(self.instance, self.database, isolation_level=level)
+            with connection.cursor() as cursor:
+                cursor.execute(
+                    "insert into singers (id, name) values (1, 'Some Singer')"
+                )
+                self.assertEqual(1, cursor.rowcount)
+            connection.commit()
+            begin_requests = list(
+                filter(
+                    lambda msg: isinstance(msg, BeginTransactionRequest),
+                    self.spanner_service.requests,
+                )
+            )
+            self.assertEqual(1, len(begin_requests))
+            self.assertEqual(begin_requests[0].options.isolation_level, level)
+            MockServerTestBase.spanner_service.clear_requests()
+
+    def test_transaction_isolation_level(self):
+        connection = Connection(self.instance, self.database)
+        for level in [
+            TransactionOptions.IsolationLevel.ISOLATION_LEVEL_UNSPECIFIED,
+            TransactionOptions.IsolationLevel.REPEATABLE_READ,
+            TransactionOptions.IsolationLevel.SERIALIZABLE,
+        ]:
+            connection.begin(isolation_level=level)
+            with connection.cursor() as cursor:
+                cursor.execute(
+                    "insert into singers (id, name) values (1, 'Some Singer')"
+                )
+                self.assertEqual(1, cursor.rowcount)
+            connection.commit()
+            begin_requests = list(
+                filter(
+                    lambda msg: isinstance(msg, BeginTransactionRequest),
+                    self.spanner_service.requests,
+                )
+            )
+            self.assertEqual(1, len(begin_requests))
+            self.assertEqual(begin_requests[0].options.isolation_level, level)
+            MockServerTestBase.spanner_service.clear_requests()

From b3c259deec817812fd8e4940faacf4a927d0d69c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?=
Date: Thu, 3 Apr 2025 10:14:19 +0200
Subject: [PATCH 2/8] fix: improve client-side regex statement parser (#1328)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix: improve client-side regex statement parser

The client-side regex-based statement parser contained multiple minor
errors, like:
- BEGIN would match any string starting with BEGIN as BEGIN TRANSACTION
  (including statements like `BEGIN foo`)
- COMMIT and ROLLBACK had the same problem as BEGIN.
- Mismatches were reported as UPDATE. They are now returned as UNKNOWN.
- DDL missed the ANALYZE keyword

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot
---
 .../cloud/spanner_dbapi/batch_dml_executor.py |  3 +++
 .../client_side_statement_parser.py           | 16 +++++++-------
 google/cloud/spanner_dbapi/connection.py      | 10 +--------
 google/cloud/spanner_dbapi/cursor.py          |  3 +++
 google/cloud/spanner_dbapi/parse_utils.py     | 22 ++++++++++++++-----
 .../cloud/spanner_dbapi/parsed_statement.py   |  1 +
 tests/unit/spanner_dbapi/test_parse_utils.py  | 22 ++++++++++++++++++-
 7 files changed, 53 insertions(+), 24 deletions(-)

diff --git a/google/cloud/spanner_dbapi/batch_dml_executor.py b/google/cloud/spanner_dbapi/batch_dml_executor.py
index 5c4e2495bb..a3ff606295 100644
--- a/google/cloud/spanner_dbapi/batch_dml_executor.py
+++ b/google/cloud/spanner_dbapi/batch_dml_executor.py
@@ -54,9 +54,12 @@ def execute_statement(self, parsed_statement: ParsedStatement):
         """
         from google.cloud.spanner_dbapi import ProgrammingError
 
+        # Note: Let the server handle it if the client-side parser did not
+        # recognize the type of statement.
         if (
             parsed_statement.statement_type != StatementType.UPDATE
             and parsed_statement.statement_type != StatementType.INSERT
+            and parsed_statement.statement_type != StatementType.UNKNOWN
         ):
             raise ProgrammingError("Only DML statements are allowed in batch DML mode.")
         self._statements.append(parsed_statement.statement)
diff --git a/google/cloud/spanner_dbapi/client_side_statement_parser.py b/google/cloud/spanner_dbapi/client_side_statement_parser.py
index 002779adb4..f978d17f03 100644
--- a/google/cloud/spanner_dbapi/client_side_statement_parser.py
+++ b/google/cloud/spanner_dbapi/client_side_statement_parser.py
@@ -21,18 +21,18 @@
     Statement,
 )
 
-RE_BEGIN = re.compile(r"^\s*(BEGIN|START)(TRANSACTION)?", re.IGNORECASE)
-RE_COMMIT = re.compile(r"^\s*(COMMIT)(TRANSACTION)?", re.IGNORECASE)
-RE_ROLLBACK = re.compile(r"^\s*(ROLLBACK)(TRANSACTION)?", re.IGNORECASE)
+RE_BEGIN = re.compile(r"^\s*(BEGIN|START)(\s+TRANSACTION)?\s*$", re.IGNORECASE)
+RE_COMMIT = re.compile(r"^\s*(COMMIT)(\s+TRANSACTION)?\s*$", re.IGNORECASE)
+RE_ROLLBACK = re.compile(r"^\s*(ROLLBACK)(\s+TRANSACTION)?\s*$", re.IGNORECASE)
 RE_SHOW_COMMIT_TIMESTAMP = re.compile(
-    r"^\s*(SHOW)\s+(VARIABLE)\s+(COMMIT_TIMESTAMP)", re.IGNORECASE
+    r"^\s*(SHOW)\s+(VARIABLE)\s+(COMMIT_TIMESTAMP)\s*$", re.IGNORECASE
 )
 RE_SHOW_READ_TIMESTAMP = re.compile(
-    r"^\s*(SHOW)\s+(VARIABLE)\s+(READ_TIMESTAMP)", re.IGNORECASE
+    r"^\s*(SHOW)\s+(VARIABLE)\s+(READ_TIMESTAMP)\s*$", re.IGNORECASE
 )
-RE_START_BATCH_DML = re.compile(r"^\s*(START)\s+(BATCH)\s+(DML)", re.IGNORECASE)
-RE_RUN_BATCH = re.compile(r"^\s*(RUN)\s+(BATCH)", re.IGNORECASE)
-RE_ABORT_BATCH = re.compile(r"^\s*(ABORT)\s+(BATCH)", re.IGNORECASE)
+RE_START_BATCH_DML = re.compile(r"^\s*(START)\s+(BATCH)\s+(DML)\s*$", re.IGNORECASE)
+RE_RUN_BATCH = re.compile(r"^\s*(RUN)\s+(BATCH)\s*$", re.IGNORECASE)
+RE_ABORT_BATCH = re.compile(r"^\s*(ABORT)\s+(BATCH)\s*$", re.IGNORECASE)
 RE_PARTITION_QUERY = re.compile(r"^\s*(PARTITION)\s+(.+)", re.IGNORECASE)
 RE_RUN_PARTITION = re.compile(r"^\s*(RUN)\s+(PARTITION)\s+(.+)", re.IGNORECASE)
 RE_RUN_PARTITIONED_QUERY = re.compile(
diff --git a/google/cloud/spanner_dbapi/connection.py b/google/cloud/spanner_dbapi/connection.py
index adcb9e97eb..a615a282b5 100644
--- a/google/cloud/spanner_dbapi/connection.py
+++ b/google/cloud/spanner_dbapi/connection.py
@@ -20,11 +20,7 @@
 from google.cloud import spanner_v1 as spanner
 from google.cloud.spanner_dbapi import partition_helper
 from google.cloud.spanner_dbapi.batch_dml_executor import BatchMode, BatchDmlExecutor
-from google.cloud.spanner_dbapi.parse_utils import _get_statement_type
-from google.cloud.spanner_dbapi.parsed_statement import (
-    StatementType,
-    AutocommitDmlMode,
-)
+from google.cloud.spanner_dbapi.parsed_statement import AutocommitDmlMode
 from google.cloud.spanner_dbapi.partition_helper import PartitionId
 from google.cloud.spanner_dbapi.parsed_statement import ParsedStatement, Statement
 from google.cloud.spanner_dbapi.transaction_helper import TransactionRetryHelper
@@ -702,10 +698,6 @@ def set_autocommit_dml_mode(
         self._autocommit_dml_mode = autocommit_dml_mode
 
     def _partitioned_query_validation(self, partitioned_query, statement):
-        if _get_statement_type(Statement(partitioned_query)) is not StatementType.QUERY:
-            raise ProgrammingError(
-                "Only queries can be partitioned. Invalid statement: " + statement.sql
-            )
         if self.read_only is not True and self._client_transaction_started is True:
             raise ProgrammingError(
                 "Partitioned query is not supported, because the connection is in a read/write transaction."
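To make the regex fix concrete, a small standalone sketch contrasting the old and new `BEGIN` patterns (both regex literals are taken from the diff above; the sample statements are illustrative):

```python
import re

# Old pattern: unanchored, and TRANSACTION did not require leading whitespace,
# so any statement starting with BEGIN or START matched.
RE_BEGIN_OLD = re.compile(r"^\s*(BEGIN|START)(TRANSACTION)?", re.IGNORECASE)
# New pattern: whitespace before TRANSACTION is required and the match is
# anchored at the end of the statement.
RE_BEGIN_NEW = re.compile(r"^\s*(BEGIN|START)(\s+TRANSACTION)?\s*$", re.IGNORECASE)

for stmt in ["begin", "begin transaction", "begin foo"]:
    print(stmt, bool(RE_BEGIN_OLD.match(stmt)), bool(RE_BEGIN_NEW.match(stmt)))
# begin             True True
# begin transaction True True
# begin foo         True False  <- no longer treated as BEGIN TRANSACTION
```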
diff --git a/google/cloud/spanner_dbapi/cursor.py b/google/cloud/spanner_dbapi/cursor.py
index 5c1539e7fc..75a368c89f 100644
--- a/google/cloud/spanner_dbapi/cursor.py
+++ b/google/cloud/spanner_dbapi/cursor.py
@@ -404,9 +404,12 @@ def executemany(self, operation, seq_of_params):
         # For every operation, we've got to ensure that any prior DDL
         # statements were run.
         self.connection.run_prior_DDL_statements()
+        # Treat UNKNOWN statements as if they are DML and let the server
+        # determine what is wrong with it.
         if self._parsed_statement.statement_type in (
             StatementType.INSERT,
             StatementType.UPDATE,
+            StatementType.UNKNOWN,
         ):
             statements = []
             for params in seq_of_params:
diff --git a/google/cloud/spanner_dbapi/parse_utils.py b/google/cloud/spanner_dbapi/parse_utils.py
index 245840ca0d..66741eb264 100644
--- a/google/cloud/spanner_dbapi/parse_utils.py
+++ b/google/cloud/spanner_dbapi/parse_utils.py
@@ -155,6 +155,7 @@
 STMT_INSERT = "INSERT"
 
 # Heuristic for identifying statements that don't need to be run as updates.
+# TODO: This and the other regexes do not match statements that start with a hint.
 RE_NON_UPDATE = re.compile(r"^\W*(SELECT|GRAPH|FROM)", re.IGNORECASE)
 
 RE_WITH = re.compile(r"^\s*(WITH)", re.IGNORECASE)
@@ -162,18 +163,22 @@
 # DDL statements follow
 # https://cloud.google.com/spanner/docs/data-definition-language
 RE_DDL = re.compile(
-    r"^\s*(CREATE|ALTER|DROP|GRANT|REVOKE|RENAME)", re.IGNORECASE | re.DOTALL
+    r"^\s*(CREATE|ALTER|DROP|GRANT|REVOKE|RENAME|ANALYZE)", re.IGNORECASE | re.DOTALL
 )
 
-RE_IS_INSERT = re.compile(r"^\s*(INSERT)", re.IGNORECASE | re.DOTALL)
+# TODO: These do not match statements that start with a hint.
+RE_IS_INSERT = re.compile(r"^\s*(INSERT\s+)", re.IGNORECASE | re.DOTALL)
+RE_IS_UPDATE = re.compile(r"^\s*(UPDATE\s+)", re.IGNORECASE | re.DOTALL)
+RE_IS_DELETE = re.compile(r"^\s*(DELETE\s+)", re.IGNORECASE | re.DOTALL)
 
 RE_INSERT = re.compile(
     # Only match the `INSERT INTO <table_name> (columns...)
    # otherwise the rest of the statement could be a complex
    # operation.
-    r"^\s*INSERT INTO (?P<table_name>[^\s\(\)]+)\s*\((?P<columns>[^\(\)]+)\)",
+    r"^\s*INSERT(?:\s+INTO)?\s+(?P<table_name>[^\s\(\)]+)\s*\((?P<columns>[^\(\)]+)\)",
     re.IGNORECASE | re.DOTALL,
 )
+"""Deprecated: Use the RE_IS_INSERT, RE_IS_UPDATE, and RE_IS_DELETE regexes"""
 
 RE_VALUES_TILL_END = re.compile(r"VALUES\s*\(.+$", re.IGNORECASE | re.DOTALL)
@@ -259,8 +264,13 @@ def _get_statement_type(statement):
         # statements and doesn't yet support WITH for DML statements.
         return StatementType.QUERY
 
-    statement.sql = ensure_where_clause(query)
-    return StatementType.UPDATE
+    if RE_IS_UPDATE.match(query) or RE_IS_DELETE.match(query):
+        # TODO: Remove this? It makes more sense to have this in SQLAlchemy and
+        # Django than here.
+        statement.sql = ensure_where_clause(query)
+        return StatementType.UPDATE
+
+    return StatementType.UNKNOWN
 
 
 def sql_pyformat_args_to_spanner(sql, params):
@@ -355,7 +365,7 @@ def get_param_types(params):
 def ensure_where_clause(sql):
     """
     Cloud Spanner requires a WHERE clause on UPDATE and DELETE statements.
-    Add a dummy WHERE clause if non detected.
+    Add a dummy WHERE clause if not detected.
 
     :type sql: str
     :param sql: SQL code to check.
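The net effect on statement classification, sketched with `classify_statement` from `parse_utils` (the expected results mirror the updated unit tests below):

```python
from google.cloud.spanner_dbapi.parse_utils import classify_statement
from google.cloud.spanner_dbapi.parsed_statement import StatementType

# DELETE is matched explicitly and still gets a dummy WHERE clause appended
# when none is present; ANALYZE is now recognized as DDL; a misspelled
# keyword falls through to UNKNOWN instead of defaulting to UPDATE.
assert classify_statement("delete from table").statement_type == StatementType.UPDATE
assert classify_statement("ANALYZE").statement_type == StatementType.DDL
assert classify_statement("udpate table set col2=1").statement_type == StatementType.UNKNOWN
```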
diff --git a/google/cloud/spanner_dbapi/parsed_statement.py b/google/cloud/spanner_dbapi/parsed_statement.py
index f89d6ea19e..a8d03f6fa4 100644
--- a/google/cloud/spanner_dbapi/parsed_statement.py
+++ b/google/cloud/spanner_dbapi/parsed_statement.py
@@ -17,6 +17,7 @@
 
 
 class StatementType(Enum):
+    UNKNOWN = 0
     CLIENT_SIDE = 1
     DDL = 2
     QUERY = 3
diff --git a/tests/unit/spanner_dbapi/test_parse_utils.py b/tests/unit/spanner_dbapi/test_parse_utils.py
index f0721bdbe3..031fbc443f 100644
--- a/tests/unit/spanner_dbapi/test_parse_utils.py
+++ b/tests/unit/spanner_dbapi/test_parse_utils.py
@@ -74,11 +74,31 @@ def test_classify_stmt(self):
             ("REVOKE SELECT ON TABLE Singers TO ROLE parent", StatementType.DDL),
             ("GRANT ROLE parent TO ROLE child", StatementType.DDL),
             ("INSERT INTO table (col1) VALUES (1)", StatementType.INSERT),
+            ("INSERT table (col1) VALUES (1)", StatementType.INSERT),
+            ("INSERT OR UPDATE table (col1) VALUES (1)", StatementType.INSERT),
+            ("INSERT OR IGNORE table (col1) VALUES (1)", StatementType.INSERT),
             ("UPDATE table SET col1 = 1 WHERE col1 = NULL", StatementType.UPDATE),
+            ("delete from table WHERE col1 = 2", StatementType.UPDATE),
+            ("delete from table WHERE col1 in (select 1)", StatementType.UPDATE),
+            ("dlete from table where col1 = 2", StatementType.UNKNOWN),
+            ("udpate table set col2=1 where col1 = 2", StatementType.UNKNOWN),
+            ("begin foo", StatementType.UNKNOWN),
+            ("begin transaction foo", StatementType.UNKNOWN),
+            ("commit foo", StatementType.UNKNOWN),
+            ("commit transaction foo", StatementType.UNKNOWN),
+            ("rollback foo", StatementType.UNKNOWN),
+            ("rollback transaction foo", StatementType.UNKNOWN),
+            ("show variable", StatementType.UNKNOWN),
+            ("show variable read_timestamp foo", StatementType.UNKNOWN),
+            ("INSERTs INTO table (col1) VALUES (1)", StatementType.UNKNOWN),
+            ("UPDATEs table SET col1 = 1 WHERE col1 = NULL", StatementType.UNKNOWN),
+            ("DELETEs from table WHERE col1 = 2", StatementType.UNKNOWN),
         )
         for query, want_class in cases:
-            self.assertEqual(classify_statement(query).statement_type, want_class)
+            self.assertEqual(
+                classify_statement(query).statement_type, want_class, query
+            )
 
     def test_partition_query_classify_stmt(self):
         parsed_statement = classify_statement(

From 3ac0f9131b38e5cfb2b574d3d73b03736b871712 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?=
Date: Mon, 14 Apr 2025 13:31:03 +0200
Subject: [PATCH 3/8] feat: add SQL statement for begin transaction isolation
 level (#1331)

* feat: add SQL statement for begin transaction isolation level

Adds an additional option to the `begin [transaction]` SQL statement to
specify the isolation level of that transaction.
The following format is now supported:

```
{begin | start} [transaction] [isolation level {repeatable read | serializable}]
```

* test: add test for invalid isolation level
---
 .../client_side_statement_executor.py         | 21 +++++-
 .../client_side_statement_parser.py           |  9 ++-
 .../test_dbapi_isolation_level.py             | 31 ++++++++
 .../test_client_side_statement_executor.py    | 54 ++++++++++++++
 tests/unit/spanner_dbapi/test_parse_utils.py  | 74 +++++++++++++++++++
 5 files changed, 186 insertions(+), 3 deletions(-)
 create mode 100644 tests/unit/spanner_dbapi/test_client_side_statement_executor.py

diff --git a/google/cloud/spanner_dbapi/client_side_statement_executor.py b/google/cloud/spanner_dbapi/client_side_statement_executor.py
index b1ed2873ae..ffda11f8b8 100644
--- a/google/cloud/spanner_dbapi/client_side_statement_executor.py
+++ b/google/cloud/spanner_dbapi/client_side_statement_executor.py
@@ -11,7 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Union
+from google.cloud.spanner_v1 import TransactionOptions
 
 if TYPE_CHECKING:
     from google.cloud.spanner_dbapi.cursor import Cursor
@@ -58,7 +59,7 @@ def execute(cursor: "Cursor", parsed_statement: ParsedStatement):
         connection.commit()
         return None
     if statement_type == ClientSideStatementType.BEGIN:
-        connection.begin()
+        connection.begin(isolation_level=_get_isolation_level(parsed_statement))
         return None
     if statement_type == ClientSideStatementType.ROLLBACK:
         connection.rollback()
@@ -121,3 +122,19 @@ def _get_streamed_result_set(column_name, type_code, column_values):
         column_values_pb.append(_make_value_pb(column_value))
     result_set.values.extend(column_values_pb)
     return StreamedResultSet(iter([result_set]))
+
+
+def _get_isolation_level(
+    statement: ParsedStatement,
+) -> Union[TransactionOptions.IsolationLevel, None]:
+    if (
+        statement.client_side_statement_params is None
+        or len(statement.client_side_statement_params) == 0
+    ):
+        return None
+    level = statement.client_side_statement_params[0]
+    if not isinstance(level, str) or level == "":
+        return None
+    # Replace (duplicate) whitespaces in the string with an underscore.
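+    # e.g. "repeatable read" -> "REPEATABLE_READ", matching the member names
+    # of the TransactionOptions.IsolationLevel enum.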
+    level = "_".join(level.split()).upper()
+    return TransactionOptions.IsolationLevel[level]
diff --git a/google/cloud/spanner_dbapi/client_side_statement_parser.py b/google/cloud/spanner_dbapi/client_side_statement_parser.py
index f978d17f03..7c26c2a98d 100644
--- a/google/cloud/spanner_dbapi/client_side_statement_parser.py
+++ b/google/cloud/spanner_dbapi/client_side_statement_parser.py
@@ -21,7 +21,10 @@
     Statement,
 )
 
-RE_BEGIN = re.compile(r"^\s*(BEGIN|START)(\s+TRANSACTION)?\s*$", re.IGNORECASE)
+RE_BEGIN = re.compile(
+    r"^\s*(?:BEGIN|START)(?:\s+TRANSACTION)?(?:\s+ISOLATION\s+LEVEL\s+(REPEATABLE\s+READ|SERIALIZABLE))?\s*$",
+    re.IGNORECASE,
+)
 RE_COMMIT = re.compile(r"^\s*(COMMIT)(\s+TRANSACTION)?\s*$", re.IGNORECASE)
 RE_ROLLBACK = re.compile(r"^\s*(ROLLBACK)(\s+TRANSACTION)?\s*$", re.IGNORECASE)
 RE_SHOW_COMMIT_TIMESTAMP = re.compile(
@@ -68,6 +71,10 @@ def parse_stmt(query):
     elif RE_START_BATCH_DML.match(query):
         client_side_statement_type = ClientSideStatementType.START_BATCH_DML
     elif RE_BEGIN.match(query):
+        match = re.search(RE_BEGIN, query)
+        isolation_level = match.group(1)
+        if isolation_level is not None:
+            client_side_statement_params.append(isolation_level)
         client_side_statement_type = ClientSideStatementType.BEGIN
     elif RE_RUN_BATCH.match(query):
         client_side_statement_type = ClientSideStatementType.RUN_BATCH
diff --git a/tests/mockserver_tests/test_dbapi_isolation_level.py b/tests/mockserver_tests/test_dbapi_isolation_level.py
index e2b6ddbb46..679740969a 100644
--- a/tests/mockserver_tests/test_dbapi_isolation_level.py
+++ b/tests/mockserver_tests/test_dbapi_isolation_level.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from google.api_core.exceptions import Unknown
 from google.cloud.spanner_dbapi import Connection
 from google.cloud.spanner_v1 import (
     BeginTransactionRequest,
@@ -117,3 +118,33 @@ def test_transaction_isolation_level(self):
         self.assertEqual(1, len(begin_requests))
         self.assertEqual(begin_requests[0].options.isolation_level, level)
         MockServerTestBase.spanner_service.clear_requests()
+
+    def test_begin_isolation_level(self):
+        connection = Connection(self.instance, self.database)
+        for level in [
+            TransactionOptions.IsolationLevel.REPEATABLE_READ,
+            TransactionOptions.IsolationLevel.SERIALIZABLE,
+        ]:
+            isolation_level_name = level.name.replace("_", " ")
+            with connection.cursor() as cursor:
+                cursor.execute(f"begin isolation level {isolation_level_name}")
+                cursor.execute(
+                    "insert into singers (id, name) values (1, 'Some Singer')"
+                )
+                self.assertEqual(1, cursor.rowcount)
+            connection.commit()
+            begin_requests = list(
+                filter(
+                    lambda msg: isinstance(msg, BeginTransactionRequest),
+                    self.spanner_service.requests,
+                )
+            )
+            self.assertEqual(1, len(begin_requests))
+            self.assertEqual(begin_requests[0].options.isolation_level, level)
+            MockServerTestBase.spanner_service.clear_requests()
+
+    def test_begin_invalid_isolation_level(self):
+        connection = Connection(self.instance, self.database)
+        with connection.cursor() as cursor:
+            with self.assertRaises(Unknown):
+                cursor.execute("begin isolation level does_not_exist")
diff --git a/tests/unit/spanner_dbapi/test_client_side_statement_executor.py b/tests/unit/spanner_dbapi/test_client_side_statement_executor.py
new file mode 100644
index 0000000000..888f81e830
--- /dev/null
+++ b/tests/unit/spanner_dbapi/test_client_side_statement_executor.py
@@ -0,0 +1,54 @@
+# Copyright 2025 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from google.cloud.spanner_dbapi.client_side_statement_executor import (
+    _get_isolation_level,
+)
+from google.cloud.spanner_dbapi.parse_utils import classify_statement
+from google.cloud.spanner_v1 import TransactionOptions
+
+
+class TestParseUtils(unittest.TestCase):
+    def test_get_isolation_level(self):
+        self.assertIsNone(_get_isolation_level(classify_statement("begin")))
+        self.assertEqual(
+            TransactionOptions.IsolationLevel.SERIALIZABLE,
+            _get_isolation_level(
+                classify_statement("begin isolation level serializable")
+            ),
+        )
+        self.assertEqual(
+            TransactionOptions.IsolationLevel.SERIALIZABLE,
+            _get_isolation_level(
+                classify_statement(
+                    "begin transaction isolation level serializable "
+                )
+            ),
+        )
+        self.assertEqual(
+            TransactionOptions.IsolationLevel.REPEATABLE_READ,
+            _get_isolation_level(
+                classify_statement("begin isolation level repeatable read")
+            ),
+        )
+        self.assertEqual(
+            TransactionOptions.IsolationLevel.REPEATABLE_READ,
+            _get_isolation_level(
+                classify_statement(
+                    "begin transaction isolation level repeatable read "
+                )
+            ),
+        )
diff --git a/tests/unit/spanner_dbapi/test_parse_utils.py b/tests/unit/spanner_dbapi/test_parse_utils.py
index 031fbc443f..f63dbb78e4 100644
--- a/tests/unit/spanner_dbapi/test_parse_utils.py
+++ b/tests/unit/spanner_dbapi/test_parse_utils.py
@@ -63,8 +63,28 @@ def test_classify_stmt(self):
             ("commit", StatementType.CLIENT_SIDE),
             ("begin", StatementType.CLIENT_SIDE),
             ("start", StatementType.CLIENT_SIDE),
+            ("begin isolation level serializable", StatementType.CLIENT_SIDE),
+            ("start isolation level serializable", StatementType.CLIENT_SIDE),
+            ("begin isolation level repeatable read", StatementType.CLIENT_SIDE),
+            ("start isolation level repeatable read", StatementType.CLIENT_SIDE),
             ("begin transaction", StatementType.CLIENT_SIDE),
             ("start transaction", StatementType.CLIENT_SIDE),
+            (
+                "begin transaction isolation level serializable",
+                StatementType.CLIENT_SIDE,
+            ),
+            (
+                "start transaction isolation level serializable",
+                StatementType.CLIENT_SIDE,
+            ),
+            (
+                "begin transaction isolation level repeatable read",
+                StatementType.CLIENT_SIDE,
+            ),
+            (
+                "start transaction isolation level repeatable read",
+                StatementType.CLIENT_SIDE,
+            ),
             ("rollback", StatementType.CLIENT_SIDE),
             (" commit TRANSACTION ", StatementType.CLIENT_SIDE),
             (" rollback TRANSACTION ", StatementType.CLIENT_SIDE),
@@ -84,6 +104,16 @@ def test_classify_stmt(self):
             ("udpate table set col2=1 where col1 = 2", StatementType.UNKNOWN),
             ("begin foo", StatementType.UNKNOWN),
             ("begin transaction foo", StatementType.UNKNOWN),
+            ("begin transaction isolation level", StatementType.UNKNOWN),
+            ("begin transaction repeatable read", StatementType.UNKNOWN),
+            (
+                "begin transaction isolation level repeatable read foo",
+                StatementType.UNKNOWN,
+            ),
+            (
+                "begin transaction isolation level unspecified",
+                StatementType.UNKNOWN,
+            ),
             ("commit foo", StatementType.UNKNOWN),
             ("commit transaction foo", StatementType.UNKNOWN),
             ("rollback foo", StatementType.UNKNOWN),
@@ -100,6 +130,50 @@ def test_classify_stmt(self):
                 classify_statement(query).statement_type, want_class, query
             )
 
+    def test_begin_isolation_level(self):
+        parsed_statement = classify_statement("begin")
+        self.assertEqual(
+            parsed_statement,
+            ParsedStatement(
+                StatementType.CLIENT_SIDE,
+                Statement("begin"),
+                ClientSideStatementType.BEGIN,
+                [],
+            ),
+        )
+        parsed_statement = classify_statement("begin isolation level serializable")
+        self.assertEqual(
+            parsed_statement,
+            ParsedStatement(
+                StatementType.CLIENT_SIDE,
+                Statement("begin isolation level serializable"),
+                ClientSideStatementType.BEGIN,
+                ["serializable"],
+            ),
+        )
+        parsed_statement = classify_statement("begin isolation level repeatable read")
+        self.assertEqual(
+            parsed_statement,
+            ParsedStatement(
+                StatementType.CLIENT_SIDE,
+                Statement("begin isolation level repeatable read"),
+                ClientSideStatementType.BEGIN,
+                ["repeatable read"],
+            ),
+        )
+        parsed_statement = classify_statement(
+            "begin isolation level repeatable read "
+        )
+        self.assertEqual(
+            parsed_statement,
+            ParsedStatement(
+                StatementType.CLIENT_SIDE,
+                Statement("begin isolation level repeatable read"),
+                ClientSideStatementType.BEGIN,
+                ["repeatable read"],
+            ),
+        )
+
     def test_partition_query_classify_stmt(self):
         parsed_statement = classify_statement(
             " PARTITION SELECT s.SongName FROM Songs AS s "

From beb33d21453a9c9ee4a61c79d39939355e55a3e4 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Mon, 14 Apr 2025 09:58:36 -0700
Subject: [PATCH 4/8] chore(python): remove noxfile.py from templates (#1335)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore(python): remove noxfile.py from templates

Source-Link: https://github.com/googleapis/synthtool/commit/776580213a73a04a3ff4fe2ed7f35c7f3d63a882
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* revert

* remove replacements in owlbot.py

* exclude noxfile.py from gapic-generator-python

---------

Co-authored-by: Owl Bot
Co-authored-by: Anthonios Partheniou
---
 .github/.OwlBot.lock.yaml |   4 +-
 owlbot.py                 | 220 +-------------------------------------
 2 files changed, 5 insertions(+), 219 deletions(-)

diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index c631e1f7d7..508ba98efe 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046
-# created: 2025-03-05
+  digest: sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447
+# created: 2025-04-14T14:34:43.260858345Z
diff --git a/owlbot.py b/owlbot.py
index 40443971d1..3027a1a8ba 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -85,6 +85,7 @@ def get_staging_dirs(
         excludes=[
             "google/cloud/spanner/**",
             "*.*",
+            "noxfile.py",
            "docs/index.rst",
            "google/cloud/spanner_v1/__init__.py",
            "**/gapic_version.py",
@@ -102,7 +103,7 @@ def get_staging_dirs(
     )
     s.move(
         library,
-        excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst", "**/gapic_version.py", "testing/constraints-3.7.txt",],
+        excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst", "noxfile.py", "**/gapic_version.py", "testing/constraints-3.7.txt",],
     )
 
 for library in get_staging_dirs(
@@ -115,7 +116,7 @@ def get_staging_dirs(
     )
     s.move(
         library,
-        excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst", "**/gapic_version.py", "testing/constraints-3.7.txt",],
+        excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst", "noxfile.py", "**/gapic_version.py", "testing/constraints-3.7.txt",],
     )
 
 s.remove_staging_dirs()
@@ -161,219 +162,4 @@ def get_staging_dirs(
     python.py_samples()
 
-# ----------------------------------------------------------------------------
-# Customize noxfile.py
-# ----------------------------------------------------------------------------
-
-
-def place_before(path, text, *before_text, escape=None):
-    replacement = "\n".join(before_text) + "\n" + text
-    if escape:
-        for c in escape:
-            text = text.replace(c, "\\" + c)
-    s.replace([path], text, replacement)
-
-
-open_telemetry_test = """
-    # XXX Work around Kokoro image's older pip, which borks the OT install.
-    session.run("pip", "install", "--upgrade", "pip")
-    constraints_path = str(
-        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
-    )
-    session.install("-e", ".[tracing]", "-c", constraints_path)
-    # XXX: Dump installed versions to debug OT issue
-    session.run("pip", "list")
-
-    # Run py.test against the unit tests with OpenTelemetry.
-    session.run(
-        "py.test",
-        "--quiet",
-        "--cov=google.cloud.spanner",
-        "--cov=google.cloud",
-        "--cov=tests.unit",
-        "--cov-append",
-        "--cov-config=.coveragerc",
-        "--cov-report=",
-        "--cov-fail-under=0",
-        os.path.join("tests", "unit"),
-        *session.posargs,
-    )
-"""
-
-place_before(
-    "noxfile.py",
-    "@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)",
-    open_telemetry_test,
-    escape="()",
-)
-
-skip_tests_if_env_var_not_set = """# Sanity check: Only run tests if the environment variable is set.
-    if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", "") and not os.environ.get(
-        "SPANNER_EMULATOR_HOST", ""
-    ):
-        session.skip(
-            "Credentials or emulator host must be set via environment variable"
-        )
-    # If POSTGRESQL tests and Emulator, skip the tests
-    if os.environ.get("SPANNER_EMULATOR_HOST") and database_dialect == "POSTGRESQL":
-        session.skip("Postgresql is not supported by Emulator yet.")
-"""
-
-place_before(
-    "noxfile.py",
-    "# Install pyopenssl for mTLS testing.",
-    skip_tests_if_env_var_not_set,
-    escape="()",
-)
-
-s.replace(
-    "noxfile.py",
-    r"""session.install\("-e", "."\)""",
-    """session.install("-e", ".[tracing]")""",
-)
-
-# Apply manual changes from PR https://github.com/googleapis/python-spanner/pull/759
-s.replace(
-    "noxfile.py",
-    """@nox.session\(python=SYSTEM_TEST_PYTHON_VERSIONS\)
-def system\(session\):""",
-    """@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
-@nox.parametrize(
-    "protobuf_implementation,database_dialect",
-    [
-        ("python", "GOOGLE_STANDARD_SQL"),
-        ("python", "POSTGRESQL"),
-        ("upb", "GOOGLE_STANDARD_SQL"),
-        ("upb", "POSTGRESQL"),
-        ("cpp", "GOOGLE_STANDARD_SQL"),
-        ("cpp", "POSTGRESQL"),
-    ],
-)
-def system(session, protobuf_implementation, database_dialect):""",
-)
-
-s.replace(
-    "noxfile.py",
-    """\*session.posargs,
-    \)""",
-    """*session.posargs,
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-            "SPANNER_DATABASE_DIALECT": database_dialect,
-            "SKIP_BACKUP_TESTS": "true",
-        },
-    )""",
-)
-
-s.replace("noxfile.py",
-    """env={
-        "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-    },""",
-    """env={
-        "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        "SPANNER_DATABASE_DIALECT": database_dialect,
-        "SKIP_BACKUP_TESTS": "true",
-    },""",
-)
-
-s.replace("noxfile.py",
-"""session.run\(
-        "py.test",
-        "tests/unit",
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    \)""",
-"""session.run(
-        "py.test",
-        "tests/unit",
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-            "SPANNER_DATABASE_DIALECT": database_dialect,
-            "SKIP_BACKUP_TESTS": "true",
-        },
-    )""",
-)
-
-s.replace(
-    "noxfile.py",
-    """\@nox.session\(python="3.13"\)
-\@nox.parametrize\(
-    "protobuf_implementation",
-    \[ "python", "upb", "cpp" \],
-\)
-def prerelease_deps\(session, protobuf_implementation\):""",
-    """@nox.session(python="3.13")
-@nox.parametrize(
-    "protobuf_implementation,database_dialect",
-    [
-        ("python", "GOOGLE_STANDARD_SQL"),
-        ("python", "POSTGRESQL"),
-        ("upb", "GOOGLE_STANDARD_SQL"),
-        ("upb", "POSTGRESQL"),
-        ("cpp", "GOOGLE_STANDARD_SQL"),
-        ("cpp", "POSTGRESQL"),
-    ],
-)
-def prerelease_deps(session, protobuf_implementation, database_dialect):""",
-)
-
-
-mockserver_test = """
-@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION)
-def mockserver(session):
-    # Install all test dependencies, then install this package in-place.
-
-    constraints_path = str(
-        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
-    )
-    # install_unittest_dependencies(session, "-c", constraints_path)
-    standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES
-    session.install(*standard_deps, "-c", constraints_path)
-    session.install("-e", ".", "-c", constraints_path)
-
-    # Run py.test against the mockserver tests.
-    session.run(
-        "py.test",
-        "--quiet",
-        f"--junitxml=unit_{session.python}_sponge_log.xml",
-        "--cov=google",
-        "--cov=tests/unit",
-        "--cov-append",
-        "--cov-config=.coveragerc",
-        "--cov-report=",
-        "--cov-fail-under=0",
-        os.path.join("tests", "mockserver_tests"),
-        *session.posargs,
-    )
-
-"""
-
-place_before(
-    "noxfile.py",
-    "def install_systemtest_dependencies(session, *constraints):",
-    mockserver_test,
-    escape="()_*:",
-)
-
-s.replace(
-    "noxfile.py",
-    "install_systemtest_dependencies\(session, \"-c\", constraints_path\)",
-    """install_systemtest_dependencies(session, "-c", constraints_path)
-
-    # TODO(https://github.com/googleapis/synthtool/issues/1976):
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-"""
-)
-
-place_before(
-    "noxfile.py",
-    "UNIT_TEST_PYTHON_VERSIONS: List[str] = [",
-    'DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12"',
-    escape="[]",
-)
-
 s.shell.run(["nox", "-s", "blacken"], hide_output=False)

From ca76108809174e4f3eea38d7ac2463d9b4c73304 Mon Sep 17 00:00:00 2001
From: aksharauke <126752897+aksharauke@users.noreply.github.com>
Date: Tue, 22 Apr 2025 12:01:57 +0530
Subject: [PATCH 5/8] feat: add sample for pre-split feature (#1333)

* feat: add sample for pre-split feature

* build error fixes

* build failure fixes

* build fixes

* lint fixes

* fixes lint

* fixed the build error

* fixed the build error

* chore: fix positional argument issue

Signed-off-by: Sri Harsha CH

* fixed the index test case

* added comment on the splits for index keys

* fixed indent

* lint fixes

* lint fixes

* chore: tests fix

Signed-off-by: Sri Harsha CH

* chore: update sample to not change editions due to failing test case

Signed-off-by: Sri Harsha CH

---------

Signed-off-by: Sri Harsha CH
Co-authored-by: Sri Harsha CH
Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com>
---
 samples/samples/snippets.py      | 94 +++++++++++++++++++++++++++++++-
 samples/samples/snippets_test.py |  7 +++
 2 files changed, 100 insertions(+), 1 deletion(-)

diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
index 6650ebe88d..e8e82ad920 100644
--- a/samples/samples/snippets.py
+++ b/samples/samples/snippets.py
@@ -33,6 +33,7 @@
 from google.cloud.spanner_v1 import DirectedReadOptions, param_types
 from google.cloud.spanner_v1.data_types import JsonObject
 from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import struct_pb2  # type: ignore
 from testdata import singer_pb2
 
@@ -90,7 +91,7 @@ def update_instance(instance_id):
             labels={
                 "sample_name": "snippets-update_instance-explicit",
             },
-            edition=spanner_instance_admin.Instance.Edition.ENTERPRISE,  # Optional
+            edition=spanner_instance_admin.Instance.Edition.STANDARD,  # Optional
         ),
         field_mask=field_mask_pb2.FieldMask(paths=["labels", "edition"]),
     )
@@ -3204,6 +3205,7 @@ def create_instance_with_autoscaling_config(instance_id):
             "sample_name": "snippets-create_instance_with_autoscaling_config",
             "created": str(int(time.time())),
         },
+        edition=spanner_instance_admin.Instance.Edition.ENTERPRISE,  # Optional
     ),
 )
@@ -3509,6 +3511,90 @@ def query_data_with_proto_types_parameter(instance_id, database_id):
 # [END spanner_query_with_proto_types_parameter]
 
 
+# [START spanner_database_add_split_points]
+def add_split_points(instance_id, database_id):
+    """Adds split points to table and index."""
+
+    from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+
+    spanner_client = spanner.Client()
+    database_admin_api = spanner_client.database_admin_api
+
+    request = spanner_database_admin.UpdateDatabaseDdlRequest(
+        database=database_admin_api.database_path(
+            spanner_client.project, instance_id, database_id
+        ),
+        statements=["CREATE INDEX IF NOT EXISTS SingersByFirstLastName ON Singers(FirstName, LastName)"],
+    )
+
+    operation = database_admin_api.update_database_ddl(request)
+
+    print("Waiting for operation to complete...")
+    operation.result(OPERATION_TIMEOUT_SECONDS)
+
+    print("Added the SingersByFirstLastName index.")
+
+    addSplitPointRequest = spanner_database_admin.AddSplitPointsRequest(
+        database=database_admin_api.database_path(
+            spanner_client.project, instance_id, database_id
+        ),
+        # Table split
+        # Index split without table key part
+        # Index split with table key part: first key is the index key and second the table key
+        split_points=[
+            spanner_database_admin.SplitPoints(
+                table="Singers",
+                keys=[
+                    spanner_database_admin.SplitPoints.Key(
+                        key_parts=struct_pb2.ListValue(
+                            values=[struct_pb2.Value(string_value="42")]
+                        )
+                    )
+                ],
+            ),
+            spanner_database_admin.SplitPoints(
+                index="SingersByFirstLastName",
+                keys=[
+                    spanner_database_admin.SplitPoints.Key(
+                        key_parts=struct_pb2.ListValue(
+                            values=[
+                                struct_pb2.Value(string_value="John"),
+                                struct_pb2.Value(string_value="Doe"),
+                            ]
+                        )
+                    )
+                ],
+            ),
+            spanner_database_admin.SplitPoints(
+                index="SingersByFirstLastName",
+                keys=[
+                    spanner_database_admin.SplitPoints.Key(
+                        key_parts=struct_pb2.ListValue(
+                            values=[
+                                struct_pb2.Value(string_value="Jane"),
+                                struct_pb2.Value(string_value="Doe"),
+                            ]
+                        )
+                    ),
+                    spanner_database_admin.SplitPoints.Key(
+                        key_parts=struct_pb2.ListValue(
+                            values=[struct_pb2.Value(string_value="38")]
+                        )
+                    ),
+
+                ],
+            ),
+        ],
+    )
+
+    operation = database_admin_api.add_split_points(addSplitPointRequest)
+
+    print("Added split points.")
+
+
+# [END spanner_database_add_split_points]
+
+
 if __name__ == "__main__":  # noqa: C901
     parser = argparse.ArgumentParser(
         description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
@@ -3666,6 +3752,10 @@ def query_data_with_proto_types_parameter(instance_id, database_id):
         "query_data_with_proto_types_parameter",
         help=query_data_with_proto_types_parameter.__doc__,
     )
+    subparsers.add_parser(
+        "add_split_points",
+        help=add_split_points.__doc__,
+    )
 
     args = parser.parse_args()
 
@@ -3815,3 +3905,5 @@ def query_data_with_proto_types_parameter(instance_id, database_id):
         update_data_with_proto_types_with_dml(args.instance_id, args.database_id)
     elif args.command == "query_data_with_proto_types_parameter":
         query_data_with_proto_types_parameter(args.instance_id, args.database_id)
+    elif args.command == "add_split_points":
+        add_split_points(args.instance_id, args.database_id)
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py
index 87fa7a43a2..eb61e8bd1f 100644
--- a/samples/samples/snippets_test.py
+++ b/samples/samples/snippets_test.py
@@ -1009,3 +1009,10 @@ def test_query_data_with_proto_types_parameter(
     )
     out, _ = capsys.readouterr()
     assert "SingerId: 2, SingerInfo: singer_id: 2" in out
+
+
+@pytest.mark.dependency(name="add_split_points", depends=["insert_data"])
+def test_add_split_points(capsys, instance_id, sample_database):
+    snippets.add_split_points(instance_id, sample_database.database_id)
+    out, _ = capsys.readouterr()
+    assert "Added split points." in out

From a8f38cdc02acca2a29563707d91d760b76859c77 Mon Sep 17 00:00:00 2001
From: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com>
Date: Tue, 22 Apr 2025 14:14:32 +0530
Subject: [PATCH 6/8] chore: sample fix with increased timeout (#1339)

Signed-off-by: Sri Harsha CH
---
 samples/samples/snippets.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
index e8e82ad920..4b4d7b5a2e 100644
--- a/samples/samples/snippets.py
+++ b/samples/samples/snippets.py
@@ -91,13 +91,13 @@ def update_instance(instance_id):
             labels={
                 "sample_name": "snippets-update_instance-explicit",
             },
-            edition=spanner_instance_admin.Instance.Edition.STANDARD,  # Optional
+            edition=spanner_instance_admin.Instance.Edition.ENTERPRISE,  # Optional
         ),
         field_mask=field_mask_pb2.FieldMask(paths=["labels", "edition"]),
     )
 
     print("Waiting for operation to complete...")
-    operation.result(OPERATION_TIMEOUT_SECONDS)
+    operation.result(900)
 
     print("Updated instance {}".format(instance_id))
 

From 6ca9b43c3038eca1317c7c9b7e3543b5f1bc68ad Mon Sep 17 00:00:00 2001
From: rahul2393
Date: Mon, 28 Apr 2025 20:47:08 +0530
Subject: [PATCH 7/8] feat: add interval type support (#1340)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* feat(spanner): add interval type support

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* fix test

* fix build

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* incorporate suggestions

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot
---
 google/cloud/spanner_v1/__init__.py    |   3 +-
 google/cloud/spanner_v1/_helpers.py    |  13 +-
 google/cloud/spanner_v1/data_types.py  | 149 +++++++-
 google/cloud/spanner_v1/param_types.py |   1 +
 google/cloud/spanner_v1/streamed.py    |   1 +
 tests/system/_helpers.py               |  13 +-
 tests/system/conftest.py               |  13 +-
 tests/system/test_session_api.py       | 207 +++++++++++
 tests/unit/test__helpers.py            | 481 +++++++++++++++++++++++++
 tests/unit/test_metrics.py             |   1 -
 10 files changed, 874 insertions(+), 8 deletions(-)

diff --git a/google/cloud/spanner_v1/__init__.py b/google/cloud/spanner_v1/__init__.py
index beeed1dacf..48b11d9342 100644
--- a/google/cloud/spanner_v1/__init__.py
+++ b/google/cloud/spanner_v1/__init__.py
@@ -63,7 +63,7 @@
 from .types.type import Type
 from .types.type import TypeAnnotationCode
 from .types.type import TypeCode
-from .data_types import JsonObject
+from .data_types import JsonObject, Interval
 from .transaction import BatchTransactionId, DefaultTransactionOptions
 
 from google.cloud.spanner_v1 import param_types
@@ -145,6 +145,7 @@
     "TypeCode",
     # Custom spanner related data types
     "JsonObject",
+    "Interval",
     # google.cloud.spanner_v1.services
     "SpannerClient",
     "SpannerAsyncClient",
diff --git a/google/cloud/spanner_v1/_helpers.py b/google/cloud/spanner_v1/_helpers.py
index d1f64db2d8..73a7679a6e 100644
--- a/google/cloud/spanner_v1/_helpers.py
+++ b/google/cloud/spanner_v1/_helpers.py
@@ -31,7 +31,7 @@
 from google.cloud._helpers import _date_from_iso8601_date
 from google.cloud.spanner_v1 import TypeCode
 from google.cloud.spanner_v1 import ExecuteSqlRequest
-from google.cloud.spanner_v1 import JsonObject
+from google.cloud.spanner_v1 import JsonObject, Interval
 from google.cloud.spanner_v1 import TransactionOptions
 from google.cloud.spanner_v1.request_id_header import with_request_id
 from google.rpc.error_details_pb2 import RetryInfo
@@ -251,6 +251,8 @@ def _make_value_pb(value):
             return Value(null_value="NULL_VALUE")
         else:
             return Value(string_value=base64.b64encode(value))
+    if isinstance(value, Interval):
+        return Value(string_value=str(value))
     raise ValueError("Unknown type: %s" % (value,))
 
@@ -367,6 +369,8 @@ def _get_type_decoder(field_type, field_name, column_info=None):
             for item_field in field_type.struct_type.fields
         ]
         return lambda value_pb: _parse_struct(value_pb, element_decoders)
+    elif type_code == TypeCode.INTERVAL:
+        return _parse_interval
     else:
         raise ValueError("Unknown type: %s" % (field_type,))
 
@@ -473,6 +477,13 @@ def _parse_nullable(value_pb, decoder):
         return decoder(value_pb)
 
 
+def _parse_interval(value_pb):
+    """Parse a Value protobuf containing an interval."""
+    if hasattr(value_pb, "string_value"):
+        return Interval.from_str(value_pb.string_value)
+    return Interval.from_str(value_pb)
+
+
 class _SessionWrapper(object):
     """Base class for objects wrapping a session.
 
diff --git a/google/cloud/spanner_v1/data_types.py b/google/cloud/spanner_v1/data_types.py
index 6b1ba5df49..6703f359e9 100644
--- a/google/cloud/spanner_v1/data_types.py
+++ b/google/cloud/spanner_v1/data_types.py
@@ -16,7 +16,8 @@
 
 import json
 import types
-
+import re
+from dataclasses import dataclass
 from google.protobuf.message import Message
 from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
 
@@ -97,6 +98,152 @@ def serialize(self):
         return json.dumps(self, sort_keys=True, separators=(",", ":"))
 
 
+@dataclass
+class Interval:
+    """Represents a Spanner INTERVAL type.
+
+    An interval is a combination of months, days and nanoseconds.
+    Internally, Spanner supports Interval value with the following range of individual fields:
+    months: [-120000, 120000]
+    days: [-3660000, 3660000]
+    nanoseconds: [-316224000000000000000, 316224000000000000000]
+    """
+
+    months: int = 0
+    days: int = 0
+    nanos: int = 0
+
+    def __str__(self) -> str:
+        """Returns the ISO8601 duration format string representation."""
+        result = ["P"]
+
+        # Handle years and months
+        if self.months:
+            is_negative = self.months < 0
+            abs_months = abs(self.months)
+            years, months = divmod(abs_months, 12)
+            if years:
+                result.append(f"{'-' if is_negative else ''}{years}Y")
+            if months:
+                result.append(f"{'-' if is_negative else ''}{months}M")
+
+        # Handle days
+        if self.days:
+            result.append(f"{self.days}D")
+
+        # Handle time components
+        if self.nanos:
+            result.append("T")
+            nanos = abs(self.nanos)
+            is_negative = self.nanos < 0
+
+            # Convert to hours, minutes, seconds
+            nanos_per_hour = 3600000000000
+            hours, nanos = divmod(nanos, nanos_per_hour)
+            if hours:
+                if is_negative:
+                    result.append("-")
+                result.append(f"{hours}H")
+
+            nanos_per_minute = 60000000000
+            minutes, nanos = divmod(nanos, nanos_per_minute)
+            if minutes:
+                if is_negative:
+                    result.append("-")
+                result.append(f"{minutes}M")
+
+            nanos_per_second = 1000000000
+            seconds, nanos_fraction = divmod(nanos, nanos_per_second)
+
+            if seconds or nanos_fraction:
+                if is_negative:
+                    result.append("-")
+                if seconds:
+                    result.append(str(seconds))
+                elif nanos_fraction:
+                    result.append("0")
+
+                if nanos_fraction:
+                    nano_str = f"{nanos_fraction:09d}"
+                    trimmed = nano_str.rstrip("0")
+                    if len(trimmed) <= 3:
+                        while len(trimmed) < 3:
+                            trimmed += "0"
+                    elif len(trimmed) <= 6:
+                        while len(trimmed) < 6:
+                            trimmed += "0"
+                    else:
+                        while len(trimmed) < 9:
+                            trimmed += "0"
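+                    # trimmed now holds 3, 6 or 9 digits, i.e. milli-, micro-
+                    # or nanosecond precision.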
+                    result.append(f".{trimmed}")
+                result.append("S")
+
+        if len(result) == 1:
+            result.append("0Y")  # Special case for zero interval
+
+        return "".join(result)
+
+    @classmethod
+    def from_str(cls, s: str) -> "Interval":
+        """Parse an ISO8601 duration format string into an Interval."""
+        pattern = r"^P(-?\d+Y)?(-?\d+M)?(-?\d+D)?(T(-?\d+H)?(-?\d+M)?(-?((\d+([.,]\d{1,9})?)|([.,]\d{1,9}))S)?)?$"
+        match = re.match(pattern, s)
+        if not match or len(s) == 1:
+            raise ValueError(f"Invalid interval format: {s}")
+
+        parts = match.groups()
+        if not any(parts[:3]) and not parts[3]:
+            raise ValueError(
+                f"Invalid interval format: at least one component (Y/M/D/H/M/S) is required: {s}"
+            )
+
+        if parts[3] == "T" and not any(parts[4:7]):
+            raise ValueError(
+                f"Invalid interval format: time designator 'T' present but no time components specified: {s}"
+            )
+
+        def parse_num(s: str, suffix: str) -> int:
+            if not s:
+                return 0
+            return int(s.rstrip(suffix))
+
+        years = parse_num(parts[0], "Y")
+        months = parse_num(parts[1], "M")
+        total_months = years * 12 + months
+
+        days = parse_num(parts[2], "D")
+
+        nanos = 0
+        if parts[3]:  # Has time component
+            # Convert hours to nanoseconds
+            hours = parse_num(parts[4], "H")
+            nanos += hours * 3600000000000
+
+            # Convert minutes to nanoseconds
+            minutes = parse_num(parts[5], "M")
+            nanos += minutes * 60000000000
+
+            # Handle seconds and fractional seconds
+            if parts[6]:
+                seconds = parts[6].rstrip("S")
+                if "," in seconds:
+                    seconds = seconds.replace(",", ".")
+
+                if "." in seconds:
+                    sec_parts = seconds.split(".")
+                    whole_seconds = sec_parts[0] if sec_parts[0] else "0"
+                    nanos += int(whole_seconds) * 1000000000
+                    frac = sec_parts[1][:9].ljust(9, "0")
+                    frac_nanos = int(frac)
+                    if seconds.startswith("-"):
+                        frac_nanos = -frac_nanos
+                    nanos += frac_nanos
+                else:
+                    nanos += int(seconds) * 1000000000
+
+        return cls(months=total_months, days=days, nanos=nanos)
+
+
 def _proto_message(bytes_val, proto_message_object):
     """Helper for :func:`get_proto_message`.
     parses serialized protocol buffer bytes data into proto message.
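A round-trip sketch of the new `Interval` type and its ISO 8601 string form (the example value matches the `__str__` and `from_str` logic above, and the "Basic interval" case in the unit tests below):

```python
from google.cloud.spanner_v1 import Interval

interval = Interval(months=14, days=3, nanos=43926789000123)
encoded = str(interval)
print(encoded)  # P1Y2M3DT12H12M6.789000123S

# from_str() parses the same ISO 8601 duration format back into
# months/days/nanos; dataclass equality compares all three fields.
assert Interval.from_str(encoded) == interval
```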
diff --git a/google/cloud/spanner_v1/param_types.py b/google/cloud/spanner_v1/param_types.py
index 5416a26d61..72127c0e0b 100644
--- a/google/cloud/spanner_v1/param_types.py
+++ b/google/cloud/spanner_v1/param_types.py
@@ -36,6 +36,7 @@
 PG_NUMERIC = Type(code=TypeCode.NUMERIC, type_annotation=TypeAnnotationCode.PG_NUMERIC)
 PG_JSONB = Type(code=TypeCode.JSON, type_annotation=TypeAnnotationCode.PG_JSONB)
 PG_OID = Type(code=TypeCode.INT64, type_annotation=TypeAnnotationCode.PG_OID)
+INTERVAL = Type(code=TypeCode.INTERVAL)
 
 
 def Array(element_type):
diff --git a/google/cloud/spanner_v1/streamed.py b/google/cloud/spanner_v1/streamed.py
index 7c067e97b6..5de843e103 100644
--- a/google/cloud/spanner_v1/streamed.py
+++ b/google/cloud/spanner_v1/streamed.py
@@ -391,6 +391,7 @@ def _merge_struct(lhs, rhs, type_):
     TypeCode.NUMERIC: _merge_string,
     TypeCode.JSON: _merge_string,
     TypeCode.PROTO: _merge_string,
+    TypeCode.INTERVAL: _merge_string,
     TypeCode.ENUM: _merge_string,
 }
 
diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py
index f157a8ee59..f37aefc2e5 100644
--- a/tests/system/_helpers.py
+++ b/tests/system/_helpers.py
@@ -115,9 +115,20 @@ def scrub_instance_ignore_not_found(to_scrub):
     """Helper for func:`cleanup_old_instances`"""
     scrub_instance_backups(to_scrub)
 
+    for database_pb in to_scrub.list_databases():
+        db = to_scrub.database(database_pb.name.split("/")[-1])
+        db.reload()
+        try:
+            if db.enable_drop_protection:
+                db.enable_drop_protection = False
+                operation = db.update(["enable_drop_protection"])
+                operation.result(DATABASE_OPERATION_TIMEOUT_IN_SECONDS)
+        except exceptions.NotFound:
+            pass
+
     try:
         retry_429_503(to_scrub.delete)()
-    except exceptions.NotFound:  # lost the race
+    except exceptions.NotFound:
         pass
 
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
index 1337de4972..bc94d065b2 100644
--- a/tests/system/conftest.py
+++ b/tests/system/conftest.py
@@ -151,10 +151,17 @@ def instance_config(instance_configs):
     if not instance_configs:
         raise ValueError("No instance configs found.")
 
-    us_west1_config = [
-        config for config in instance_configs if config.display_name == "us-west1"
+    import random
+
+    us_configs = [
+        config
+        for config in instance_configs
+        if config.display_name in ["us-south1", "us-east4"]
     ]
-    config = us_west1_config[0] if len(us_west1_config) > 0 else instance_configs[0]
+
+    config = (
+        random.choice(us_configs) if us_configs else random.choice(instance_configs)
+    )
 
     yield config
 
diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py
index 4de0e681f6..73b55b035d 100644
--- a/tests/system/test_session_api.py
+++ b/tests/system/test_session_api.py
@@ -2907,3 +2907,210 @@ def _check_batch_status(status_code, expected=code_pb2.OK):
         raise exceptions.from_grpc_status(
             grpc_status_code, "batch_update failed", errors=[call]
         )
+
+
+def get_param_info(param_names, database_dialect):
+    keys = [f"p{i + 1}" for i in range(len(param_names))]
+    if database_dialect == DatabaseDialect.POSTGRESQL:
+        placeholders = [f"${i + 1}" for i in range(len(param_names))]
+    else:
+        placeholders = [f"@p{i + 1}" for i in range(len(param_names))]
+    return keys, placeholders
+
+
+def test_interval(sessions_database, database_dialect, not_emulator):
+    from google.cloud.spanner_v1 import Interval
+
+    def setup_table():
+        if database_dialect == DatabaseDialect.POSTGRESQL:
+            sessions_database.update_ddl(
+                [
+                    """
+                    CREATE TABLE IntervalTable (
+                        key text primary key,
+                        create_time timestamptz,
+                        expiry_time timestamptz,
+                        expiry_within_month bool GENERATED ALWAYS AS (expiry_time - create_time < INTERVAL '30' DAY) STORED,
GENERATED ALWAYS AS (expiry_time - create_time < INTERVAL '30' DAY) STORED, + interval_array_len bigint GENERATED ALWAYS AS (ARRAY_LENGTH(ARRAY[INTERVAL '1-2 3 4:5:6'], 1)) STORED + ) + """ + ] + ).result() + else: + sessions_database.update_ddl( + [ + """ + CREATE TABLE IntervalTable ( + key STRING(MAX), + create_time TIMESTAMP, + expiry_time TIMESTAMP, + expiry_within_month bool AS (expiry_time - create_time < INTERVAL 30 DAY), + interval_array_len INT64 AS (ARRAY_LENGTH(ARRAY[INTERVAL '1-2 3 4:5:6' YEAR TO SECOND])) + ) PRIMARY KEY (key) + """ + ] + ).result() + + def insert_test1(transaction): + keys, placeholders = get_param_info( + ["key", "create_time", "expiry_time"], database_dialect + ) + transaction.execute_update( + f""" + INSERT INTO IntervalTable (key, create_time, expiry_time) + VALUES ({placeholders[0]}, {placeholders[1]}, {placeholders[2]}) + """, + params={ + keys[0]: "test1", + keys[1]: datetime.datetime(2004, 11, 30, 4, 53, 54, tzinfo=UTC), + keys[2]: datetime.datetime(2004, 12, 15, 4, 53, 54, tzinfo=UTC), + }, + param_types={ + keys[0]: spanner_v1.param_types.STRING, + keys[1]: spanner_v1.param_types.TIMESTAMP, + keys[2]: spanner_v1.param_types.TIMESTAMP, + }, + ) + + def insert_test2(transaction): + keys, placeholders = get_param_info( + ["key", "create_time", "expiry_time"], database_dialect + ) + transaction.execute_update( + f""" + INSERT INTO IntervalTable (key, create_time, expiry_time) + VALUES ({placeholders[0]}, {placeholders[1]}, {placeholders[2]}) + """, + params={ + keys[0]: "test2", + keys[1]: datetime.datetime(2004, 8, 30, 4, 53, 54, tzinfo=UTC), + keys[2]: datetime.datetime(2004, 12, 15, 4, 53, 54, tzinfo=UTC), + }, + param_types={ + keys[0]: spanner_v1.param_types.STRING, + keys[1]: spanner_v1.param_types.TIMESTAMP, + keys[2]: spanner_v1.param_types.TIMESTAMP, + }, + ) + + def test_computed_columns(transaction): + keys, placeholders = get_param_info(["key"], database_dialect) + results = list( + transaction.execute_sql( + f""" + SELECT expiry_within_month, interval_array_len + FROM IntervalTable + WHERE key = {placeholders[0]}""", + params={keys[0]: "test1"}, + param_types={keys[0]: spanner_v1.param_types.STRING}, + ) + ) + assert len(results) == 1 + row = results[0] + assert row[0] is True # expiry_within_month + assert row[1] == 1 # interval_array_len + + def test_interval_arithmetic(transaction): + results = list( + transaction.execute_sql( + "SELECT INTERVAL '1' DAY + INTERVAL '1' MONTH AS Col1" + ) + ) + assert len(results) == 1 + row = results[0] + interval = row[0] + assert interval.months == 1 + assert interval.days == 1 + assert interval.nanos == 0 + + def test_interval_timestamp_comparison(transaction): + timestamp = "2004-11-30T10:23:54+0530" + keys, placeholders = get_param_info(["interval"], database_dialect) + if database_dialect == DatabaseDialect.POSTGRESQL: + query = f"SELECT COUNT(*) FROM IntervalTable WHERE create_time < TIMESTAMPTZ '%s' - {placeholders[0]}" + else: + query = f"SELECT COUNT(*) FROM IntervalTable WHERE create_time < TIMESTAMP('%s') - {placeholders[0]}" + + results = list( + transaction.execute_sql( + query % timestamp, + params={keys[0]: Interval(days=30)}, + param_types={keys[0]: spanner_v1.param_types.INTERVAL}, + ) + ) + assert len(results) == 1 + assert results[0][0] == 1 + + def test_interval_array_param(transaction): + intervals = [ + Interval(months=14, days=3, nanos=14706000000000), + Interval(), + Interval(months=-14, days=-3, nanos=-14706000000000), + None, + ] + keys, placeholders = 
get_param_info(["intervals"], database_dialect) + array_type = spanner_v1.Type( + code=spanner_v1.TypeCode.ARRAY, + array_element_type=spanner_v1.param_types.INTERVAL, + ) + results = list( + transaction.execute_sql( + f"SELECT {placeholders[0]}", + params={keys[0]: intervals}, + param_types={keys[0]: array_type}, + ) + ) + assert len(results) == 1 + row = results[0] + intervals = row[0] + assert len(intervals) == 4 + + assert intervals[0].months == 14 + assert intervals[0].days == 3 + assert intervals[0].nanos == 14706000000000 + + assert intervals[1].months == 0 + assert intervals[1].days == 0 + assert intervals[1].nanos == 0 + + assert intervals[2].months == -14 + assert intervals[2].days == -3 + assert intervals[2].nanos == -14706000000000 + + assert intervals[3] is None + + def test_interval_array_cast(transaction): + results = list( + transaction.execute_sql( + """ + SELECT ARRAY[ + CAST('P1Y2M3DT4H5M6.789123S' AS INTERVAL), + null, + CAST('P-1Y-2M-3DT-4H-5M-6.789123S' AS INTERVAL) + ] AS Col1 + """ + ) + ) + assert len(results) == 1 + row = results[0] + intervals = row[0] + assert len(intervals) == 3 + + assert intervals[0].months == 14 # 1 year + 2 months + assert intervals[0].days == 3 + assert intervals[0].nanos == 14706789123000 # 4h5m6.789123s in nanos + + assert intervals[1] is None + + assert intervals[2].months == -14 + assert intervals[2].days == -3 + assert intervals[2].nanos == -14706789123000 + + setup_table() + sessions_database.run_in_transaction(insert_test1) + sessions_database.run_in_transaction(test_computed_columns) + sessions_database.run_in_transaction(test_interval_arithmetic) + sessions_database.run_in_transaction(insert_test2) + sessions_database.run_in_transaction(test_interval_timestamp_comparison) + sessions_database.run_in_transaction(test_interval_array_param) + sessions_database.run_in_transaction(test_interval_array_cast) diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index bd861cc8eb..7010affdd2 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -1036,3 +1036,484 @@ def test_default_isolation_and_merge_options_isolation_unspecified(self): ) result = self._callFUT(default, merge) self.assertEqual(result, expected) + + +class Test_interval(unittest.TestCase): + from google.protobuf.struct_pb2 import Value + from google.cloud.spanner_v1 import Interval + from google.cloud.spanner_v1 import Type + from google.cloud.spanner_v1 import TypeCode + + def _callFUT(self, *args, **kw): + from google.cloud.spanner_v1._helpers import _make_value_pb + + return _make_value_pb(*args, **kw) + + def test_interval_cases(self): + test_cases = [ + { + "name": "Basic interval", + "interval": self.Interval(months=14, days=3, nanos=43926789000123), + "expected": "P1Y2M3DT12H12M6.789000123S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Months only", + "interval": self.Interval(months=10, days=0, nanos=0), + "expected": "P10M", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Days only", + "interval": self.Interval(months=0, days=10, nanos=0), + "expected": "P10D", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Seconds only", + "interval": self.Interval(months=0, days=0, nanos=10000000000), + "expected": "PT10S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Milliseconds only", + "interval": self.Interval(months=0, days=0, nanos=10000000), + "expected": "PT0.010S", + "expected_type": 
self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Microseconds only", + "interval": self.Interval(months=0, days=0, nanos=10000), + "expected": "PT0.000010S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Nanoseconds only", + "interval": self.Interval(months=0, days=0, nanos=10), + "expected": "PT0.000000010S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Mixed components", + "interval": self.Interval(months=10, days=20, nanos=1030), + "expected": "P10M20DT0.000001030S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Mixed components with negative nanos", + "interval": self.Interval(months=10, days=20, nanos=-1030), + "expected": "P10M20DT-0.000001030S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Negative interval", + "interval": self.Interval(months=-14, days=-3, nanos=-43926789000123), + "expected": "P-1Y-2M-3DT-12H-12M-6.789000123S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Mixed signs", + "interval": self.Interval(months=10, days=3, nanos=-41401234000000), + "expected": "P10M3DT-11H-30M-1.234S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Large values", + "interval": self.Interval( + months=25, days=15, nanos=316223999999999999999 + ), + "expected": "P2Y1M15DT87839999H59M59.999999999S", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + { + "name": "Zero interval", + "interval": self.Interval(months=0, days=0, nanos=0), + "expected": "P0Y", + "expected_type": self.Type(code=self.TypeCode.INTERVAL), + }, + ] + + for case in test_cases: + with self.subTest(name=case["name"]): + value_pb = self._callFUT(case["interval"]) + self.assertIsInstance(value_pb, self.Value) + self.assertEqual(value_pb.string_value, case["expected"]) + # TODO: Add type checking once we have access to the type information + + +class Test_parse_interval(unittest.TestCase): + from google.protobuf.struct_pb2 import Value + + def _callFUT(self, *args, **kw): + from google.cloud.spanner_v1._helpers import _parse_interval + + return _parse_interval(*args, **kw) + + def test_parse_interval_cases(self): + test_cases = [ + { + "name": "full interval with all components", + "input": "P1Y2M3DT12H12M6.789000123S", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": 43926789000123, + "want_err": False, + }, + { + "name": "interval with negative minutes", + "input": "P1Y2M3DT13H-48M6S", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": 43926000000000, + "want_err": False, + }, + { + "name": "date only interval", + "input": "P1Y2M3D", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": 0, + "want_err": False, + }, + { + "name": "years and months only", + "input": "P1Y2M", + "expected_months": 14, + "expected_days": 0, + "expected_nanos": 0, + "want_err": False, + }, + { + "name": "years only", + "input": "P1Y", + "expected_months": 12, + "expected_days": 0, + "expected_nanos": 0, + "want_err": False, + }, + { + "name": "months only", + "input": "P2M", + "expected_months": 2, + "expected_days": 0, + "expected_nanos": 0, + "want_err": False, + }, + { + "name": "days only", + "input": "P3D", + "expected_months": 0, + "expected_days": 3, + "expected_nanos": 0, + "want_err": False, + }, + { + "name": "time components with fractional seconds", + "input": "PT4H25M6.7890001S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 15906789000100, + 
"want_err": False, + }, + { + "name": "time components without fractional seconds", + "input": "PT4H25M6S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 15906000000000, + "want_err": False, + }, + { + "name": "hours and seconds only", + "input": "PT4H30S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 14430000000000, + "want_err": False, + }, + { + "name": "hours and minutes only", + "input": "PT4H1M", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 14460000000000, + "want_err": False, + }, + { + "name": "minutes only", + "input": "PT5M", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 300000000000, + "want_err": False, + }, + { + "name": "fractional seconds only", + "input": "PT6.789S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 6789000000, + "want_err": False, + }, + { + "name": "small fractional seconds", + "input": "PT0.123S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 123000000, + "want_err": False, + }, + { + "name": "very small fractional seconds", + "input": "PT.000000123S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 123, + "want_err": False, + }, + { + "name": "zero years", + "input": "P0Y", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 0, + "want_err": False, + }, + { + "name": "all negative components", + "input": "P-1Y-2M-3DT-12H-12M-6.789000123S", + "expected_months": -14, + "expected_days": -3, + "expected_nanos": -43926789000123, + "want_err": False, + }, + { + "name": "mixed signs in components", + "input": "P1Y-2M3DT13H-51M6.789S", + "expected_months": 10, + "expected_days": 3, + "expected_nanos": 43746789000000, + "want_err": False, + }, + { + "name": "negative years with mixed signs", + "input": "P-1Y2M-3DT-13H49M-6.789S", + "expected_months": -10, + "expected_days": -3, + "expected_nanos": -43866789000000, + "want_err": False, + }, + { + "name": "negative time components", + "input": "P1Y2M3DT-4H25M-6.7890001S", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": -12906789000100, + "want_err": False, + }, + { + "name": "large time values", + "input": "PT100H100M100.5S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 366100500000000, + "want_err": False, + }, + { + "name": "only time components with seconds", + "input": "PT12H30M1S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 45001000000000, + "want_err": False, + }, + { + "name": "date and time no seconds", + "input": "P1Y2M3DT12H30M", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": 45000000000000, + "want_err": False, + }, + { + "name": "fractional seconds with max digits", + "input": "PT0.123456789S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 123456789, + "want_err": False, + }, + { + "name": "hours and fractional seconds", + "input": "PT1H0.5S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 3600500000000, + "want_err": False, + }, + { + "name": "years and months to months with fractional seconds", + "input": "P1Y2M3DT12H30M1.23456789S", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": 45001234567890, + "want_err": False, + }, + { + "name": "comma as decimal point", + "input": "P1Y2M3DT12H30M1,23456789S", + "expected_months": 14, + "expected_days": 3, + "expected_nanos": 45001234567890, + "want_err": False, + }, + { + "name": "fractional seconds without 0 before decimal", + "input": "PT.5S", + "expected_months": 0, + 
"expected_days": 0, + "expected_nanos": 500000000, + "want_err": False, + }, + { + "name": "mixed signs", + "input": "P-1Y2M3DT12H-30M1.234S", + "expected_months": -10, + "expected_days": 3, + "expected_nanos": 41401234000000, + "want_err": False, + }, + { + "name": "more mixed signs", + "input": "P1Y-2M3DT-12H30M-1.234S", + "expected_months": 10, + "expected_days": 3, + "expected_nanos": -41401234000000, + "want_err": False, + }, + { + "name": "trailing zeros after decimal", + "input": "PT1.234000S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 1234000000, + "want_err": False, + }, + { + "name": "all zeros after decimal", + "input": "PT1.000S", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 1000000000, + "want_err": False, + }, + # Invalid cases + {"name": "invalid format", "input": "invalid", "want_err": True}, + {"name": "missing duration specifier", "input": "P", "want_err": True}, + {"name": "missing time components", "input": "PT", "want_err": True}, + {"name": "missing unit specifier", "input": "P1YM", "want_err": True}, + {"name": "missing T separator", "input": "P1Y2M3D4H5M6S", "want_err": True}, + { + "name": "missing decimal value", + "input": "P1Y2M3DT4H5M6.S", + "want_err": True, + }, + { + "name": "extra unit specifier", + "input": "P1Y2M3DT4H5M6.789SS", + "want_err": True, + }, + { + "name": "missing value after decimal", + "input": "P1Y2M3DT4H5M6.", + "want_err": True, + }, + { + "name": "non-digit after decimal", + "input": "P1Y2M3DT4H5M6.ABC", + "want_err": True, + }, + {"name": "missing unit", "input": "P1Y2M3", "want_err": True}, + {"name": "missing time value", "input": "P1Y2M3DT", "want_err": True}, + { + "name": "invalid negative sign position", + "input": "P-T1H", + "want_err": True, + }, + {"name": "trailing negative sign", "input": "PT1H-", "want_err": True}, + { + "name": "too many decimal places", + "input": "P1Y2M3DT4H5M6.789123456789S", + "want_err": True, + }, + { + "name": "multiple decimal points", + "input": "P1Y2M3DT4H5M6.123.456S", + "want_err": True, + }, + { + "name": "both dot and comma decimals", + "input": "P1Y2M3DT4H5M6.,789S", + "want_err": True, + }, + ] + + for case in test_cases: + with self.subTest(name=case["name"]): + value_pb = self.Value(string_value=case["input"]) + if case.get("want_err", False): + with self.assertRaises(ValueError): + self._callFUT(value_pb) + else: + result = self._callFUT(value_pb) + self.assertEqual(result.months, case["expected_months"]) + self.assertEqual(result.days, case["expected_days"]) + self.assertEqual(result.nanos, case["expected_nanos"]) + + def test_large_values(self): + large_test_cases = [ + { + "name": "large positive hours", + "input": "PT87840000H", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": 316224000000000000000, + "want_err": False, + }, + { + "name": "large negative hours", + "input": "PT-87840000H", + "expected_months": 0, + "expected_days": 0, + "expected_nanos": -316224000000000000000, + "want_err": False, + }, + { + "name": "large mixed values with max precision", + "input": "P2Y1M15DT87839999H59M59.999999999S", + "expected_months": 25, + "expected_days": 15, + "expected_nanos": 316223999999999999999, + "want_err": False, + }, + { + "name": "large mixed negative values with max precision", + "input": "P2Y1M15DT-87839999H-59M-59.999999999S", + "expected_months": 25, + "expected_days": 15, + "expected_nanos": -316223999999999999999, + "want_err": False, + }, + ] + + for case in large_test_cases: + with 
self.subTest(name=case["name"]): + value_pb = self.Value(string_value=case["input"]) + if case.get("want_err", False): + with self.assertRaises(ValueError): + self._callFUT(value_pb) + else: + result = self._callFUT(value_pb) + self.assertEqual(result.months, case["expected_months"]) + self.assertEqual(result.days, case["expected_days"]) + self.assertEqual(result.nanos, case["expected_nanos"]) diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index cd5ca2e6fc..bb2695553b 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -65,7 +65,6 @@ def mocked_call(*args, **kwargs): return _UnaryOutcome(MagicMock(), MagicMock()) def intercept_wrapper(invoked_method, request_or_iterator, call_details): - nonlocal original_intercept nonlocal first_attempt invoked_method = mocked_call if first_attempt: From 933114619aab9746d92d81bde8c18c2ef972369c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 13:11:56 -0700 Subject: [PATCH 8/8] chore(main): release 3.54.0 (#1330) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 15 +++++++++++++++ .../spanner_admin_database_v1/gapic_version.py | 2 +- .../spanner_admin_instance_v1/gapic_version.py | 2 +- google/cloud/spanner_v1/gapic_version.py | 2 +- ...metadata_google.spanner.admin.database.v1.json | 2 +- ...metadata_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- 8 files changed, 22 insertions(+), 7 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 00d392a248..62c031f3f8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.53.0" + ".": "3.54.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 0bde684970..ee56542822 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## [3.54.0](https://github.com/googleapis/python-spanner/compare/v3.53.0...v3.54.0) (2025-04-28) + + +### Features + +* Add interval type support ([#1340](https://github.com/googleapis/python-spanner/issues/1340)) ([6ca9b43](https://github.com/googleapis/python-spanner/commit/6ca9b43c3038eca1317c7c9b7e3543b5f1bc68ad)) +* Add sample for pre-split feature ([#1333](https://github.com/googleapis/python-spanner/issues/1333)) ([ca76108](https://github.com/googleapis/python-spanner/commit/ca76108809174e4f3eea38d7ac2463d9b4c73304)) +* Add SQL statement for begin transaction isolation level ([#1331](https://github.com/googleapis/python-spanner/issues/1331)) ([3ac0f91](https://github.com/googleapis/python-spanner/commit/3ac0f9131b38e5cfb2b574d3d73b03736b871712)) +* Support transaction isolation level in dbapi ([#1327](https://github.com/googleapis/python-spanner/issues/1327)) ([03400c4](https://github.com/googleapis/python-spanner/commit/03400c40f1c1cc73e51733f2a28910a8dd78e7d9)) + + +### Bug Fixes + +* Improve client-side regex statement parser ([#1328](https://github.com/googleapis/python-spanner/issues/1328)) ([b3c259d](https://github.com/googleapis/python-spanner/commit/b3c259deec817812fd8e4940faacf4a927d0d69c)) + ## [3.53.0](https://github.com/googleapis/python-spanner/compare/v3.52.0...v3.53.0) (2025-03-12) diff --git a/google/cloud/spanner_admin_database_v1/gapic_version.py b/google/cloud/spanner_admin_database_v1/gapic_version.py index 9b205942db..9f7e08d550 100644 --- 
a/google/cloud/spanner_admin_database_v1/gapic_version.py +++ b/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.53.0" # {x-release-please-version} +__version__ = "3.54.0" # {x-release-please-version} diff --git a/google/cloud/spanner_admin_instance_v1/gapic_version.py b/google/cloud/spanner_admin_instance_v1/gapic_version.py index 9b205942db..9f7e08d550 100644 --- a/google/cloud/spanner_admin_instance_v1/gapic_version.py +++ b/google/cloud/spanner_admin_instance_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.53.0" # {x-release-please-version} +__version__ = "3.54.0" # {x-release-please-version} diff --git a/google/cloud/spanner_v1/gapic_version.py b/google/cloud/spanner_v1/gapic_version.py index 9b205942db..9f7e08d550 100644 --- a/google/cloud/spanner_v1/gapic_version.py +++ b/google/cloud/spanner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.53.0" # {x-release-please-version} +__version__ = "3.54.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index fc77bc1740..9bbabdab00 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.53.0" + "version": "3.54.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 74eaaff2f8..765c9d46ed 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.53.0" + "version": "3.54.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index ba20d6b76a..c9c643d8b2 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.53.0" + "version": "3.54.0" }, "snippets": [ {