From 58e2406af3c8918e37e0daadefaf537073aed1a4 Mon Sep 17 00:00:00 2001 From: surbhigarg92 Date: Wed, 24 Sep 2025 10:58:36 +0530 Subject: [PATCH 1/4] docs: Add snippet for Repeatable Read configuration at client and transaction (#1326) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: snapshot isolation sample * updated the sample * lint samples * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- samples/samples/backup_sample.py | 3 + samples/samples/backup_sample_test.py | 9 +- samples/samples/backup_schedule_samples.py | 91 +++++++++------- .../samples/backup_schedule_samples_test.py | 56 +++++----- samples/samples/pg_snippets.py | 27 ++--- samples/samples/snippets.py | 100 ++++++++++++++---- samples/samples/snippets_test.py | 14 ++- 7 files changed, 188 insertions(+), 112 deletions(-) diff --git a/samples/samples/backup_sample.py b/samples/samples/backup_sample.py index e3a2b6957d..e984d3a11e 100644 --- a/samples/samples/backup_sample.py +++ b/samples/samples/backup_sample.py @@ -116,6 +116,7 @@ def create_backup_with_encryption_key( # [END spanner_create_backup_with_encryption_key] + # [START spanner_create_backup_with_MR_CMEK] def create_backup_with_multiple_kms_keys( instance_id, database_id, backup_id, kms_key_names @@ -246,6 +247,7 @@ def restore_database_with_encryption_key( # [END spanner_restore_backup_with_encryption_key] + # [START spanner_restore_backup_with_MR_CMEK] def restore_database_with_multiple_kms_keys( instance_id, new_database_id, backup_id, kms_key_names @@ -697,6 +699,7 @@ def copy_backup(instance_id, backup_id, source_backup_path): # [END spanner_copy_backup] + # [START spanner_copy_backup_with_MR_CMEK] def copy_backup_with_multiple_kms_keys( instance_id, backup_id, source_backup_path, kms_key_names diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 5ab1e747ab..b588d5735b 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -93,8 +93,7 @@ def test_create_backup_with_encryption_key( assert kms_key_name in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " - "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") @pytest.mark.dependency(name="create_backup_with_multiple_kms_keys") def test_create_backup_with_multiple_kms_keys( capsys, @@ -116,8 +115,7 @@ def test_create_backup_with_multiple_kms_keys( assert kms_key_names[2] in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " - "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") @pytest.mark.dependency(depends=["create_backup_with_multiple_kms_keys"]) def test_copy_backup_with_multiple_kms_keys( capsys, multi_region_instance_id, spanner_client, kms_key_names @@ -164,8 +162,7 @@ def test_restore_database_with_encryption_key( assert kms_key_name in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " - "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") @pytest.mark.dependency(depends=["create_backup_with_multiple_kms_keys"]) @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_restore_database_with_multiple_kms_keys( diff --git a/samples/samples/backup_schedule_samples.py b/samples/samples/backup_schedule_samples.py index 
621febf0fc..c3c86b1538 100644 --- a/samples/samples/backup_schedule_samples.py +++ b/samples/samples/backup_schedule_samples.py @@ -24,25 +24,26 @@ # [START spanner_create_full_backup_schedule] def create_full_backup_schedule( - instance_id: str, - database_id: str, - schedule_id: str, + instance_id: str, + database_id: str, + schedule_id: str, ) -> None: from datetime import timedelta from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import \ - backup_schedule as backup_schedule_pb - from google.cloud.spanner_admin_database_v1.types import \ - CreateBackupEncryptionConfig, FullBackupSpec + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) + from google.cloud.spanner_admin_database_v1.types import ( + CreateBackupEncryptionConfig, + FullBackupSpec, + ) client = spanner.Client() database_admin_api = client.database_admin_api request = backup_schedule_pb.CreateBackupScheduleRequest( parent=database_admin_api.database_path( - client.project, - instance_id, - database_id + client.project, instance_id, database_id ), backup_schedule_id=schedule_id, backup_schedule=backup_schedule_pb.BackupSchedule( @@ -62,30 +63,32 @@ def create_full_backup_schedule( response = database_admin_api.create_backup_schedule(request) print(f"Created full backup schedule: {response}") + # [END spanner_create_full_backup_schedule] # [START spanner_create_incremental_backup_schedule] def create_incremental_backup_schedule( - instance_id: str, - database_id: str, - schedule_id: str, + instance_id: str, + database_id: str, + schedule_id: str, ) -> None: from datetime import timedelta from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import \ - backup_schedule as backup_schedule_pb - from google.cloud.spanner_admin_database_v1.types import \ - CreateBackupEncryptionConfig, IncrementalBackupSpec + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) + from google.cloud.spanner_admin_database_v1.types import ( + CreateBackupEncryptionConfig, + IncrementalBackupSpec, + ) client = spanner.Client() database_admin_api = client.database_admin_api request = backup_schedule_pb.CreateBackupScheduleRequest( parent=database_admin_api.database_path( - client.project, - instance_id, - database_id + client.project, instance_id, database_id ), backup_schedule_id=schedule_id, backup_schedule=backup_schedule_pb.BackupSchedule( @@ -105,14 +108,16 @@ def create_incremental_backup_schedule( response = database_admin_api.create_backup_schedule(request) print(f"Created incremental backup schedule: {response}") + # [END spanner_create_incremental_backup_schedule] # [START spanner_list_backup_schedules] def list_backup_schedules(instance_id: str, database_id: str) -> None: from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import \ - backup_schedule as backup_schedule_pb + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) client = spanner.Client() database_admin_api = client.database_admin_api @@ -128,18 +133,20 @@ def list_backup_schedules(instance_id: str, database_id: str) -> None: for backup_schedule in database_admin_api.list_backup_schedules(request): print(f"Backup schedule: {backup_schedule}") + # [END spanner_list_backup_schedules] # [START spanner_get_backup_schedule] def get_backup_schedule( - instance_id: str, - database_id: str, - schedule_id: str, + instance_id: str, + 
database_id: str, + schedule_id: str, ) -> None: from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import \ - backup_schedule as backup_schedule_pb + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) client = spanner.Client() database_admin_api = client.database_admin_api @@ -156,21 +163,24 @@ def get_backup_schedule( response = database_admin_api.get_backup_schedule(request) print(f"Backup schedule: {response}") + # [END spanner_get_backup_schedule] # [START spanner_update_backup_schedule] def update_backup_schedule( - instance_id: str, - database_id: str, - schedule_id: str, + instance_id: str, + database_id: str, + schedule_id: str, ) -> None: from datetime import timedelta from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import \ - backup_schedule as backup_schedule_pb - from google.cloud.spanner_admin_database_v1.types import \ - CreateBackupEncryptionConfig + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) + from google.cloud.spanner_admin_database_v1.types import ( + CreateBackupEncryptionConfig, + ) from google.protobuf.field_mask_pb2 import FieldMask client = spanner.Client() @@ -206,18 +216,20 @@ def update_backup_schedule( response = database_admin_api.update_backup_schedule(request) print(f"Updated backup schedule: {response}") + # [END spanner_update_backup_schedule] # [START spanner_delete_backup_schedule] def delete_backup_schedule( - instance_id: str, - database_id: str, - schedule_id: str, + instance_id: str, + database_id: str, + schedule_id: str, ) -> None: from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import \ - backup_schedule as backup_schedule_pb + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) client = spanner.Client() database_admin_api = client.database_admin_api @@ -234,6 +246,7 @@ def delete_backup_schedule( database_admin_api.delete_backup_schedule(request) print("Deleted backup schedule") + # [END spanner_delete_backup_schedule] diff --git a/samples/samples/backup_schedule_samples_test.py b/samples/samples/backup_schedule_samples_test.py index eb4be96b43..6584d89701 100644 --- a/samples/samples/backup_schedule_samples_test.py +++ b/samples/samples/backup_schedule_samples_test.py @@ -33,9 +33,9 @@ def database_id(): @pytest.mark.dependency(name="create_full_backup_schedule") def test_create_full_backup_schedule( - capsys, - sample_instance, - sample_database, + capsys, + sample_instance, + sample_database, ) -> None: samples.create_full_backup_schedule( sample_instance.instance_id, @@ -53,9 +53,9 @@ def test_create_full_backup_schedule( @pytest.mark.dependency(name="create_incremental_backup_schedule") def test_create_incremental_backup_schedule( - capsys, - sample_instance, - sample_database, + capsys, + sample_instance, + sample_database, ) -> None: samples.create_incremental_backup_schedule( sample_instance.instance_id, @@ -71,14 +71,16 @@ def test_create_incremental_backup_schedule( ) in out -@pytest.mark.dependency(depends=[ - "create_full_backup_schedule", - "create_incremental_backup_schedule", -]) +@pytest.mark.dependency( + depends=[ + "create_full_backup_schedule", + "create_incremental_backup_schedule", + ] +) def test_list_backup_schedules( - capsys, - sample_instance, - sample_database, + capsys, + sample_instance, + sample_database, ) -> None: samples.list_backup_schedules( 
sample_instance.instance_id, @@ -99,9 +101,9 @@ def test_list_backup_schedules( @pytest.mark.dependency(depends=["create_full_backup_schedule"]) def test_get_backup_schedule( - capsys, - sample_instance, - sample_database, + capsys, + sample_instance, + sample_database, ) -> None: samples.get_backup_schedule( sample_instance.instance_id, @@ -118,9 +120,9 @@ def test_get_backup_schedule( @pytest.mark.dependency(depends=["create_full_backup_schedule"]) def test_update_backup_schedule( - capsys, - sample_instance, - sample_database, + capsys, + sample_instance, + sample_database, ) -> None: samples.update_backup_schedule( sample_instance.instance_id, @@ -136,14 +138,16 @@ def test_update_backup_schedule( ) in out -@pytest.mark.dependency(depends=[ - "create_full_backup_schedule", - "create_incremental_backup_schedule", -]) +@pytest.mark.dependency( + depends=[ + "create_full_backup_schedule", + "create_incremental_backup_schedule", + ] +) def test_delete_backup_schedule( - capsys, - sample_instance, - sample_database, + capsys, + sample_instance, + sample_database, ) -> None: samples.delete_backup_schedule( sample_instance.instance_id, diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index ad8744794a..432d68a8ce 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -69,8 +69,7 @@ def create_instance(instance_id): def create_database(instance_id, database_id): """Creates a PostgreSql database and tables for sample data.""" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -91,8 +90,7 @@ def create_database(instance_id, database_id): def create_table_using_ddl(database_name): - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() request = spanner_database_admin.UpdateDatabaseDdlRequest( @@ -240,8 +238,7 @@ def read_data(instance_id, database_id): def add_column(instance_id, database_id): """Adds a new column to the Albums table in the example database.""" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -441,8 +438,7 @@ def read_data_with_index(instance_id, database_id): def add_storing_index(instance_id, database_id): """Adds an storing index to the example database.""" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -1091,8 +1087,7 @@ def create_table_with_datatypes(instance_id, database_id): # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -1476,8 +1471,7 @@ def add_jsonb_column(instance_id, database_id): # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - 
from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -1593,8 +1587,7 @@ def query_data_with_jsonb_parameter(instance_id, database_id): def create_sequence(instance_id, database_id): """Creates the Sequence and insert data""" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -1651,8 +1644,7 @@ def insert_customers(transaction): def alter_sequence(instance_id, database_id): """Alters the Sequence and insert data""" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api @@ -1703,8 +1695,7 @@ def insert_customers(transaction): def drop_sequence(instance_id, database_id): """Drops the Sequence""" - from google.cloud.spanner_admin_database_v1.types import \ - spanner_database_admin + from google.cloud.spanner_admin_database_v1.types import spanner_database_admin spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py index 87b7ab86a2..96d8fd3f89 100644 --- a/samples/samples/snippets.py +++ b/samples/samples/snippets.py @@ -75,11 +75,11 @@ def create_instance(instance_id): # [END spanner_create_instance] + # [START spanner_update_instance] def update_instance(instance_id): """Updates an instance.""" - from google.cloud.spanner_admin_instance_v1.types import \ - spanner_instance_admin + from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin spanner_client = spanner.Client() @@ -366,6 +366,7 @@ def create_database_with_encryption_key(instance_id, database_id, kms_key_name): # [END spanner_create_database_with_encryption_key] + # [START spanner_create_database_with_MR_CMEK] def create_database_with_multiple_kms_keys(instance_id, database_id, kms_key_names): """Creates a database with tables using multiple KMS keys(CMEK).""" @@ -409,6 +410,7 @@ def create_database_with_multiple_kms_keys(instance_id, database_id, kms_key_nam # [END spanner_create_database_with_MR_CMEK] + # [START spanner_create_database_with_default_leader] def create_database_with_default_leader(instance_id, database_id, default_leader): """Creates a database with tables with a default leader.""" @@ -1591,7 +1593,11 @@ def __init__(self): super().__init__("commit_stats_sample") def info(self, msg, *args, **kwargs): - if "extra" in kwargs and kwargs["extra"] and "commit_stats" in kwargs["extra"]: + if ( + "extra" in kwargs + and kwargs["extra"] + and "commit_stats" in kwargs["extra"] + ): self.last_commit_stats = kwargs["extra"]["commit_stats"] super().info(msg, *args, **kwargs) @@ -3176,6 +3182,56 @@ def directed_read_options( # [END spanner_directed_read] +def isolation_level_options( + instance_id, + database_id, +): + from google.cloud.spanner_v1 import TransactionOptions, DefaultTransactionOptions + + """ + Shows how to run a Read Write transaction with isolation level options. 
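+    The isolation level set on the client is used for all read-write
+    transactions by default; a level passed when running a single
+    transaction takes precedence for that transaction.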
+ """ + # [START spanner_isolation_level] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + + # The isolation level specified at the client-level will be applied to all RW transactions. + isolation_options_for_client = TransactionOptions.IsolationLevel.SERIALIZABLE + + spanner_client = spanner.Client( + default_transaction_options=DefaultTransactionOptions( + isolation_level=isolation_options_for_client + ) + ) + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + # The isolation level specified at the request level takes precedence over the isolation level configured at the client level. + isolation_options_for_transaction = ( + TransactionOptions.IsolationLevel.REPEATABLE_READ + ) + + def update_albums_with_isolation(transaction): + # Read an AlbumTitle. + results = transaction.execute_sql( + "SELECT AlbumTitle from Albums WHERE SingerId = 1 and AlbumId = 1" + ) + for result in results: + print("Current Album Title: {}".format(*result)) + + # Update the AlbumTitle. + row_ct = transaction.execute_update( + "UPDATE Albums SET AlbumTitle = 'A New Title' WHERE SingerId = 1 and AlbumId = 1" + ) + + print("{} record(s) updated.".format(row_ct)) + + database.run_in_transaction( + update_albums_with_isolation, isolation_level=isolation_options_for_transaction + ) + # [END spanner_isolation_level] + + def set_custom_timeout_and_retry(instance_id, database_id): """Executes a snapshot read with custom timeout and retry.""" # [START spanner_set_custom_timeout_and_retry] @@ -3288,14 +3344,14 @@ def create_instance_without_default_backup_schedules(instance_id): ) operation = spanner_client.instance_admin_api.create_instance( - parent=spanner_client.project_name, - instance_id=instance_id, - instance=spanner_instance_admin.Instance( - config=config_name, - display_name="This is a display name.", - node_count=1, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, # Optional - ), + parent=spanner_client.project_name, + instance_id=instance_id, + instance=spanner_instance_admin.Instance( + config=config_name, + display_name="This is a display name.", + node_count=1, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, # Optional + ), ) print("Waiting for operation to complete...") @@ -3314,13 +3370,11 @@ def update_instance_default_backup_schedule_type(instance_id): name = "{}/instances/{}".format(spanner_client.project_name, instance_id) operation = spanner_client.instance_admin_api.update_instance( - instance=spanner_instance_admin.Instance( - name=name, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.AUTOMATIC, # Optional - ), - field_mask=field_mask_pb2.FieldMask( - paths=["default_backup_schedule_type"] - ), + instance=spanner_instance_admin.Instance( + name=name, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.AUTOMATIC, # Optional + ), + field_mask=field_mask_pb2.FieldMask(paths=["default_backup_schedule_type"]), ) print("Waiting for operation to complete...") @@ -3581,7 +3635,9 @@ def add_split_points(instance_id, database_id): database=database_admin_api.database_path( spanner_client.project, instance_id, database_id ), - statements=["CREATE INDEX IF NOT EXISTS SingersByFirstLastName ON Singers(FirstName, LastName)"], + statements=[ + "CREATE INDEX IF NOT EXISTS SingersByFirstLastName ON Singers(FirstName, LastName)" + ], ) operation = 
database_admin_api.update_database_ddl(request) @@ -3638,7 +3694,6 @@ def add_split_points(instance_id, database_id): values=[struct_pb2.Value(string_value="38")] ) ), - ], ), ], @@ -3798,6 +3853,9 @@ def add_split_points(instance_id, database_id): ) enable_fine_grained_access_parser.add_argument("--title", default="condition title") subparsers.add_parser("directed_read_options", help=directed_read_options.__doc__) + subparsers.add_parser( + "isolation_level_options", help=isolation_level_options.__doc__ + ) subparsers.add_parser( "set_custom_timeout_and_retry", help=set_custom_timeout_and_retry.__doc__ ) @@ -3958,6 +4016,8 @@ def add_split_points(instance_id, database_id): ) elif args.command == "directed_read_options": directed_read_options(args.instance_id, args.database_id) + elif args.command == "isolation_level_options": + isolation_level_options(args.instance_id, args.database_id) elif args.command == "set_custom_timeout_and_retry": set_custom_timeout_and_retry(args.instance_id, args.database_id) elif args.command == "create_instance_with_autoscaling_config": diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 72f243fdb5..03c9f2682c 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -197,7 +197,9 @@ def test_create_instance_with_autoscaling_config(capsys, lci_instance_id): retry_429(instance.delete)() -def test_create_and_update_instance_default_backup_schedule_type(capsys, lci_instance_id): +def test_create_and_update_instance_default_backup_schedule_type( + capsys, lci_instance_id +): retry_429(snippets.create_instance_without_default_backup_schedules)( lci_instance_id, ) @@ -252,8 +254,7 @@ def test_create_database_with_encryption_config( assert kms_key_name in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " - "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") def test_create_database_with_multiple_kms_keys( capsys, multi_region_instance, @@ -991,6 +992,13 @@ def test_set_custom_timeout_and_retry(capsys, instance_id, sample_database): assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out +@pytest.mark.dependency(depends=["insert_data"]) +def test_isolated_level_options(capsys, instance_id, sample_database): + snippets.isolation_level_options(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "1 record(s) updated." 
in out + + @pytest.mark.dependency( name="add_proto_types_column", ) From a09961b381314e3f06f1ff4be7b672cd9da9c64b Mon Sep 17 00:00:00 2001 From: Subham Sinha <35077434+sinhasubham@users.noreply.github.com> Date: Thu, 9 Oct 2025 16:18:54 +0530 Subject: [PATCH 2/4] feat(spanner): add lazy decode to partitioned query (#1411) --- google/cloud/spanner_v1/database.py | 29 ++++- google/cloud/spanner_v1/merged_result_set.py | 41 ++++++- tests/unit/test_database.py | 15 +++ tests/unit/test_merged_result_set.py | 119 +++++++++++++++++++ 4 files changed, 199 insertions(+), 5 deletions(-) create mode 100644 tests/unit/test_merged_result_set.py diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index 215cd5bed8..c5fc56bcc9 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -1532,6 +1532,14 @@ def to_dict(self): "transaction_id": snapshot._transaction_id, } + def __enter__(self): + """Begin ``with`` block.""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """End ``with`` block.""" + self.close() + @property def observability_options(self): return getattr(self._database, "observability_options", {}) @@ -1703,6 +1711,7 @@ def process_read_batch( *, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, + lazy_decode=False, ): """Process a single, partitioned read. @@ -1717,6 +1726,14 @@ def process_read_batch( :type timeout: float :param timeout: (Optional) The timeout for this request. + :type lazy_decode: bool + :param lazy_decode: + (Optional) If this argument is set to ``true``, the iterator + returns the underlying protobuf values instead of decoded Python + objects. This reduces the time that is needed to iterate through + large result sets. The application is responsible for decoding + the data that is needed. + :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. @@ -1844,6 +1861,7 @@ def process_query_batch( self, batch, *, + lazy_decode: bool = False, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, ): @@ -1854,6 +1872,13 @@ def process_query_batch( one of the mappings returned from an earlier call to :meth:`generate_query_batches`. + :type lazy_decode: bool + :param lazy_decode: + (Optional) If this argument is set to ``true``, the iterator + returns the underlying protobuf values instead of decoded Python + objects. This reduces the time that is needed to iterate through + large result sets. + :type retry: :class:`~google.api_core.retry.Retry` :param retry: (Optional) The retry settings for this request. @@ -1870,6 +1895,7 @@ def process_query_batch( return self._get_snapshot().execute_sql( partition=batch["partition"], **batch["query"], + lazy_decode=lazy_decode, retry=retry, timeout=timeout, ) @@ -1883,6 +1909,7 @@ def run_partitioned_query( max_partitions=None, query_options=None, data_boost_enabled=False, + lazy_decode=False, ): """Start a partitioned query operation to get list of partitions and then executes each partition on a separate thread @@ -1943,7 +1970,7 @@ def run_partitioned_query( data_boost_enabled, ) ) - return MergedResultSet(self, partitions, 0) + return MergedResultSet(self, partitions, 0, lazy_decode=lazy_decode) def process(self, batch): """Process a single, partitioned query or read. 
diff --git a/google/cloud/spanner_v1/merged_result_set.py b/google/cloud/spanner_v1/merged_result_set.py index 7af989d696..6c5c792246 100644 --- a/google/cloud/spanner_v1/merged_result_set.py +++ b/google/cloud/spanner_v1/merged_result_set.py @@ -33,10 +33,13 @@ class PartitionExecutor: rows in the queue """ - def __init__(self, batch_snapshot, partition_id, merged_result_set): + def __init__( + self, batch_snapshot, partition_id, merged_result_set, lazy_decode=False + ): self._batch_snapshot: BatchSnapshot = batch_snapshot self._partition_id = partition_id self._merged_result_set: MergedResultSet = merged_result_set + self._lazy_decode = lazy_decode self._queue: Queue[PartitionExecutorResult] = merged_result_set._queue def run(self): @@ -52,7 +55,9 @@ def run(self): def __run(self): results = None try: - results = self._batch_snapshot.process_query_batch(self._partition_id) + results = self._batch_snapshot.process_query_batch( + self._partition_id, lazy_decode=self._lazy_decode + ) for row in results: if self._merged_result_set._metadata is None: self._set_metadata(results) @@ -75,6 +80,7 @@ def _set_metadata(self, results, is_exception=False): try: if not is_exception: self._merged_result_set._metadata = results.metadata + self._merged_result_set._result_set = results finally: self._merged_result_set.metadata_lock.release() self._merged_result_set.metadata_event.set() @@ -94,7 +100,10 @@ class MergedResultSet: records in the MergedResultSet is not guaranteed. """ - def __init__(self, batch_snapshot, partition_ids, max_parallelism): + def __init__( + self, batch_snapshot, partition_ids, max_parallelism, lazy_decode=False + ): + self._result_set = None self._exception = None self._metadata = None self.metadata_event = Event() @@ -110,7 +119,7 @@ def __init__(self, batch_snapshot, partition_ids, max_parallelism): partition_executors = [] for partition_id in partition_ids: partition_executors.append( - PartitionExecutor(batch_snapshot, partition_id, self) + PartitionExecutor(batch_snapshot, partition_id, self, lazy_decode) ) executor = ThreadPoolExecutor(max_workers=parallelism) for partition_executor in partition_executors: @@ -144,3 +153,27 @@ def metadata(self): def stats(self): # TODO: Implement return None + + def decode_row(self, row: []) -> []: + """Decodes a row from protobuf values to Python objects. This function + should only be called for result sets that use ``lazy_decoding=True``. + The array that is returned by this function is the same as the array + that would have been returned by the rows iterator if ``lazy_decoding=False``. + + :returns: an array containing the decoded values of all the columns in the given row + """ + if self._result_set is None: + raise ValueError("iterator not started") + return self._result_set.decode_row(row) + + def decode_column(self, row: [], column_index: int): + """Decodes a column from a protobuf value to a Python object. This function + should only be called for result sets that use ``lazy_decoding=True``. + The object that is returned by this function is the same as the object + that would have been returned by the rows iterator if ``lazy_decoding=False``. 
+ + :returns: the decoded column value + """ + if self._result_set is None: + raise ValueError("iterator not started") + return self._result_set.decode_column(row, column_index) diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py index 1c7f58c4ab..fa6792b9da 100644 --- a/tests/unit/test_database.py +++ b/tests/unit/test_database.py @@ -3141,6 +3141,7 @@ def test_process_query_batch(self): params=params, param_types=param_types, partition=token, + lazy_decode=False, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, ) @@ -3170,6 +3171,7 @@ def test_process_query_batch_w_retry_timeout(self): params=params, param_types=param_types, partition=token, + lazy_decode=False, retry=retry, timeout=2.0, ) @@ -3193,11 +3195,23 @@ def test_process_query_batch_w_directed_read_options(self): snapshot.execute_sql.assert_called_once_with( sql=sql, partition=token, + lazy_decode=False, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, directed_read_options=DIRECTED_READ_OPTIONS, ) + def test_context_manager(self): + database = self._make_database() + batch_txn = self._make_one(database) + session = batch_txn._session = self._make_session() + session.is_multiplexed = False + + with batch_txn: + pass + + session.delete.assert_called_once_with() + def test_close_wo_session(self): database = self._make_database() batch_txn = self._make_one(database) @@ -3292,6 +3306,7 @@ def test_process_w_query_batch(self): params=params, param_types=param_types, partition=token, + lazy_decode=False, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, ) diff --git a/tests/unit/test_merged_result_set.py b/tests/unit/test_merged_result_set.py new file mode 100644 index 0000000000..99fe50765e --- /dev/null +++ b/tests/unit/test_merged_result_set.py @@ -0,0 +1,119 @@ +# Copyright 2025 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
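+
+# These tests cover the lazy-decode support added to MergedResultSet in this
+# patch. A hypothetical usage sketch of the feature under test (assuming the
+# BatchSnapshot API shown above):
+#
+#     with database.batch_snapshot() as snapshot:
+#         results = snapshot.run_partitioned_query(sql, lazy_decode=True)
+#         for row in results:
+#             name = results.decode_column(row, 0)  # decode on demand
+#
+# decode_row and decode_column delegate to the underlying result set and
+# raise ValueError("iterator not started") until a partition has produced
+# results.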
+ +import unittest + +import mock +from google.cloud.spanner_v1.streamed import StreamedResultSet + + +class TestMergedResultSet(unittest.TestCase): + def _get_target_class(self): + from google.cloud.spanner_v1.merged_result_set import MergedResultSet + + return MergedResultSet + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + obj = super(klass, klass).__new__(klass) + from threading import Event, Lock + + obj.metadata_event = Event() + obj.metadata_lock = Lock() + obj._metadata = None + obj._result_set = None + return obj + + @staticmethod + def _make_value(value): + from google.cloud.spanner_v1._helpers import _make_value_pb + + return _make_value_pb(value) + + @staticmethod + def _make_scalar_field(name, type_): + from google.cloud.spanner_v1 import StructType + from google.cloud.spanner_v1 import Type + + return StructType.Field(name=name, type_=Type(code=type_)) + + @staticmethod + def _make_result_set_metadata(fields=()): + from google.cloud.spanner_v1 import ResultSetMetadata + from google.cloud.spanner_v1 import StructType + + metadata = ResultSetMetadata(row_type=StructType(fields=[])) + for field in fields: + metadata.row_type.fields.append(field) + return metadata + + def test_stats_property(self): + merged = self._make_one() + # The property is currently not implemented, so it should just return None. + self.assertIsNone(merged.stats) + + def test_decode_row(self): + merged = self._make_one() + + merged._result_set = mock.create_autospec(StreamedResultSet, instance=True) + merged._result_set.decode_row.return_value = ["Phred", 42] + + raw_row = [self._make_value("Phred"), self._make_value(42)] + decoded_row = merged.decode_row(raw_row) + + self.assertEqual(decoded_row, ["Phred", 42]) + merged._result_set.decode_row.assert_called_once_with(raw_row) + + def test_decode_row_no_result_set(self): + merged = self._make_one() + merged._result_set = None + with self.assertRaisesRegex(ValueError, "iterator not started"): + merged.decode_row([]) + + def test_decode_row_type_error(self): + merged = self._make_one() + merged._result_set = mock.create_autospec(StreamedResultSet, instance=True) + merged._result_set.decode_row.side_effect = TypeError + + with self.assertRaises(TypeError): + merged.decode_row("not a list") + + def test_decode_column(self): + merged = self._make_one() + merged._result_set = mock.create_autospec(StreamedResultSet, instance=True) + merged._result_set.decode_column.side_effect = ["Phred", 42] + + raw_row = [self._make_value("Phred"), self._make_value(42)] + decoded_name = merged.decode_column(raw_row, 0) + decoded_age = merged.decode_column(raw_row, 1) + + self.assertEqual(decoded_name, "Phred") + self.assertEqual(decoded_age, 42) + merged._result_set.decode_column.assert_has_calls( + [mock.call(raw_row, 0), mock.call(raw_row, 1)] + ) + + def test_decode_column_no_result_set(self): + merged = self._make_one() + merged._result_set = None + with self.assertRaisesRegex(ValueError, "iterator not started"): + merged.decode_column([], 0) + + def test_decode_column_type_error(self): + merged = self._make_one() + merged._result_set = mock.create_autospec(StreamedResultSet, instance=True) + merged._result_set.decode_column.side_effect = TypeError + + with self.assertRaises(TypeError): + merged.decode_column("not a list", 0) From 7266686d6773f39a30603061ae881e258421d927 Mon Sep 17 00:00:00 2001 From: Subham Sinha <35077434+sinhasubham@users.noreply.github.com> Date: Sat, 18 Oct 2025 09:17:38 +0530 Subject: [PATCH 3/4] fix(spanner): resolve 
TypeError in metrics resource detection (#1446) * fix(spanner): resolve TypeError in metrics resource detection * fix(spanner): add exception handling for metrics initialization --- google/cloud/spanner_v1/client.py | 31 +++++++----- .../metrics/spanner_metrics_tracer_factory.py | 28 +++++++---- tests/unit/test_client.py | 23 +++++++++ .../test_spanner_metrics_tracer_factory.py | 47 +++++++++++++++++++ 4 files changed, 108 insertions(+), 21 deletions(-) diff --git a/google/cloud/spanner_v1/client.py b/google/cloud/spanner_v1/client.py index e0e8c44058..6ebabbb34e 100644 --- a/google/cloud/spanner_v1/client.py +++ b/google/cloud/spanner_v1/client.py @@ -25,6 +25,7 @@ """ import grpc import os +import logging import warnings from google.api_core.gapic_v1 import client_info @@ -97,6 +98,9 @@ def _get_spanner_optimizer_statistics_package(): return os.getenv(OPTIMIZER_STATISITCS_PACKAGE_ENV_VAR, "") +log = logging.getLogger(__name__) + + def _get_spanner_enable_builtin_metrics(): return os.getenv(ENABLE_SPANNER_METRICS_ENV_VAR) == "true" @@ -240,19 +244,24 @@ def __init__( and HAS_GOOGLE_CLOUD_MONITORING_INSTALLED ): meter_provider = metrics.NoOpMeterProvider() - if not _get_spanner_emulator_host(): - meter_provider = MeterProvider( - metric_readers=[ - PeriodicExportingMetricReader( - CloudMonitoringMetricsExporter( - project_id=project, credentials=credentials + try: + if not _get_spanner_emulator_host(): + meter_provider = MeterProvider( + metric_readers=[ + PeriodicExportingMetricReader( + CloudMonitoringMetricsExporter( + project_id=project, credentials=credentials + ), + export_interval_millis=METRIC_EXPORT_INTERVAL_MS, ), - export_interval_millis=METRIC_EXPORT_INTERVAL_MS, - ) - ] + ] + ) + metrics.set_meter_provider(meter_provider) + SpannerMetricsTracerFactory() + except Exception as e: + log.warning( + "Failed to initialize Spanner built-in metrics. Error: %s", e ) - metrics.set_meter_provider(meter_provider) - SpannerMetricsTracerFactory() else: SpannerMetricsTracerFactory(enabled=False) diff --git a/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py b/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py index fd00c4de9c..881a5bfca9 100644 --- a/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py +++ b/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py @@ -17,6 +17,7 @@ from .metrics_tracer_factory import MetricsTracerFactory import os +import logging from .constants import ( SPANNER_SERVICE_NAME, GOOGLE_CLOUD_REGION_KEY, @@ -33,9 +34,6 @@ import mmh3 - # Override Resource detector logging to not warn when GCP resources are not detected - import logging - logging.getLogger("opentelemetry.resourcedetector.gcp_resource_detector").setLevel( logging.ERROR ) @@ -48,6 +46,8 @@ from google.cloud.spanner_v1 import __version__ from uuid import uuid4 +log = logging.getLogger(__name__) + class SpannerMetricsTracerFactory(MetricsTracerFactory): """A factory for creating SpannerMetricsTracer instances.""" @@ -158,15 +158,23 @@ def _generate_client_hash(client_uid: str) -> str: def _get_location() -> str: """Get the location of the resource. + In case of any error during detection, this method will log a warning + and default to the "global" location. + Returns: str: The location of the resource. If OpenTelemetry is not installed, returns a global region. 
""" if not HAS_OPENTELEMETRY_INSTALLED: return GOOGLE_CLOUD_REGION_GLOBAL - detector = gcp_resource_detector.GoogleCloudResourceDetector() - resources = detector.detect() - - if GOOGLE_CLOUD_REGION_KEY not in resources.attributes: - return GOOGLE_CLOUD_REGION_GLOBAL - else: - return resources[GOOGLE_CLOUD_REGION_KEY] + try: + detector = gcp_resource_detector.GoogleCloudResourceDetector() + resources = detector.detect() + + if GOOGLE_CLOUD_REGION_KEY in resources.attributes: + return resources.attributes[GOOGLE_CLOUD_REGION_KEY] + except Exception as e: + log.warning( + "Failed to detect GCP resource location for Spanner metrics, defaulting to 'global'. Error: %s", + e, + ) + return GOOGLE_CLOUD_REGION_GLOBAL diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 212dc9ee4f..f0d246673a 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -255,6 +255,29 @@ def test_constructor_w_directed_read_options(self): expected_scopes, creds, directed_read_options=self.DIRECTED_READ_OPTIONS ) + @mock.patch.dict(os.environ, {"SPANNER_ENABLE_BUILTIN_METRICS": "true"}) + @mock.patch("google.cloud.spanner_v1.client.SpannerMetricsTracerFactory") + def test_constructor_w_metrics_initialization_error( + self, mock_spanner_metrics_factory + ): + """ + Test that Client constructor handles exceptions during metrics + initialization and logs a warning. + """ + from google.cloud.spanner_v1.client import Client + + mock_spanner_metrics_factory.side_effect = Exception("Metrics init failed") + creds = build_scoped_credentials() + + with self.assertLogs("google.cloud.spanner_v1.client", level="WARNING") as log: + client = Client(project=self.PROJECT, credentials=creds) + self.assertIsNotNone(client) + self.assertIn( + "Failed to initialize Spanner built-in metrics. Error: Metrics init failed", + log.output[0], + ) + mock_spanner_metrics_factory.assert_called_once() + def test_constructor_route_to_leader_disbled(self): from google.cloud.spanner_v1 import client as MUT diff --git a/tests/unit/test_spanner_metrics_tracer_factory.py b/tests/unit/test_spanner_metrics_tracer_factory.py index 8ee4d53d3d..48fe1b4837 100644 --- a/tests/unit/test_spanner_metrics_tracer_factory.py +++ b/tests/unit/test_spanner_metrics_tracer_factory.py @@ -13,9 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest +import unittest +from unittest import mock + +from google.cloud.spanner_v1.metrics.constants import GOOGLE_CLOUD_REGION_KEY from google.cloud.spanner_v1.metrics.spanner_metrics_tracer_factory import ( SpannerMetricsTracerFactory, ) +from opentelemetry.sdk.resources import Resource + +pytest.importorskip("opentelemetry") class TestSpannerMetricsTracerFactory: @@ -48,3 +56,42 @@ def test_get_location(self): location = SpannerMetricsTracerFactory._get_location() assert isinstance(location, str) assert location # Simply asserting for non empty as this can change depending on the instance this test runs in. 
+ + +class TestSpannerMetricsTracerFactoryGetLocation(unittest.TestCase): + @mock.patch( + "opentelemetry.resourcedetector.gcp_resource_detector.GoogleCloudResourceDetector.detect" + ) + def test_get_location_with_region(self, mock_detect): + """Test that _get_location returns the region when detected.""" + mock_resource = Resource.create({GOOGLE_CLOUD_REGION_KEY: "us-central1"}) + mock_detect.return_value = mock_resource + + location = SpannerMetricsTracerFactory._get_location() + assert location == "us-central1" + + @mock.patch( + "opentelemetry.resourcedetector.gcp_resource_detector.GoogleCloudResourceDetector.detect" + ) + def test_get_location_without_region(self, mock_detect): + """Test that _get_location returns 'global' when no region is detected.""" + mock_resource = Resource.create({}) # No region attribute + mock_detect.return_value = mock_resource + + location = SpannerMetricsTracerFactory._get_location() + assert location == "global" + + @mock.patch( + "opentelemetry.resourcedetector.gcp_resource_detector.GoogleCloudResourceDetector.detect" + ) + def test_get_location_with_exception(self, mock_detect): + """Test that _get_location returns 'global' and logs a warning on exception.""" + mock_detect.side_effect = Exception("detector failed") + + with self.assertLogs( + "google.cloud.spanner_v1.metrics.spanner_metrics_tracer_factory", + level="WARNING", + ) as log: + location = SpannerMetricsTracerFactory._get_location() + assert location == "global" + self.assertIn("Failed to detect GCP resource location", log.output[0]) From 8818c30afec265315e0914c96c3c13435bbff90a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 23 Oct 2025 14:59:28 +0530 Subject: [PATCH 4/4] chore(main): release 3.59.0 (#1412) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 17 +++++++++++++++++ .../spanner_admin_database_v1/gapic_version.py | 2 +- .../spanner_admin_instance_v1/gapic_version.py | 2 +- google/cloud/spanner_v1/gapic_version.py | 2 +- ...tadata_google.spanner.admin.database.v1.json | 2 +- ...tadata_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- 8 files changed, 24 insertions(+), 7 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 63ab47b126..6c2193e315 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.58.0" + ".": "3.59.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c2f33e74f..b5bbe07325 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## [3.59.0](https://github.com/googleapis/python-spanner/compare/v3.58.0...v3.59.0) (2025-10-18) + + +### Features + +* **spanner:** Add lazy decode to partitioned query ([#1411](https://github.com/googleapis/python-spanner/issues/1411)) ([a09961b](https://github.com/googleapis/python-spanner/commit/a09961b381314e3f06f1ff4be7b672cd9da9c64b)) + + +### Bug Fixes + +* **spanner:** Resolve TypeError in metrics resource detection ([#1446](https://github.com/googleapis/python-spanner/issues/1446)) ([7266686](https://github.com/googleapis/python-spanner/commit/7266686d6773f39a30603061ae881e258421d927)) + + +### Documentation + +* Add snippet for Repeatable Read configuration at client and transaction ([#1326](https://github.com/googleapis/python-spanner/issues/1326)) 
([58e2406](https://github.com/googleapis/python-spanner/commit/58e2406af3c8918e37e0daadefaf537073aed1a4)) + ## [3.58.0](https://github.com/googleapis/python-spanner/compare/v3.57.0...v3.58.0) (2025-09-10) diff --git a/google/cloud/spanner_admin_database_v1/gapic_version.py b/google/cloud/spanner_admin_database_v1/gapic_version.py index fa3f4c040d..17acb3026a 100644 --- a/google/cloud/spanner_admin_database_v1/gapic_version.py +++ b/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.58.0" # {x-release-please-version} +__version__ = "3.59.0" # {x-release-please-version} diff --git a/google/cloud/spanner_admin_instance_v1/gapic_version.py b/google/cloud/spanner_admin_instance_v1/gapic_version.py index fa3f4c040d..17acb3026a 100644 --- a/google/cloud/spanner_admin_instance_v1/gapic_version.py +++ b/google/cloud/spanner_admin_instance_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.58.0" # {x-release-please-version} +__version__ = "3.59.0" # {x-release-please-version} diff --git a/google/cloud/spanner_v1/gapic_version.py b/google/cloud/spanner_v1/gapic_version.py index fa3f4c040d..17acb3026a 100644 --- a/google/cloud/spanner_v1/gapic_version.py +++ b/google/cloud/spanner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.58.0" # {x-release-please-version} +__version__ = "3.59.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index d10e70605f..e6eeb1f977 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.58.0" + "version": "3.59.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 05a040bd1b..92ae0279ef 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.58.0" + "version": "3.59.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index 1eb4c96ad5..4d84b1ab9a 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.58.0" + "version": "3.59.0" }, "snippets": [ {