-On January 1, 2020 this library will no longer support Python 2 on the latest released version.
-Previously released library versions will continue to be available. For more information please
+As of January 1, 2020, this library no longer supports Python 2 on the latest released version.
+Library versions released prior to that date will continue to be available. For more information please
 visit Python 2 support on Google Cloud.
 {% block body %} {% endblock %}
diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md
new file mode 100644
index 0000000000..55c97b32f4
--- /dev/null
+++ b/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md
new file mode 100644
index 0000000000..34c882b6f1
--- /dev/null
+++ b/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/samples/samples/README.rst b/samples/samples/README.rst
index 143402fde5..b0573c249b 100644
--- a/samples/samples/README.rst
+++ b/samples/samples/README.rst
@@ -1,3 +1,4 @@
+
 .. This file is automatically generated. Do not edit this file directly.

 Google Cloud Spanner Python Samples
@@ -14,10 +15,12 @@ This directory contains samples for Google Cloud Spanner.
 `Google Cloud Spanner`_

 .. _Google Cloud Spanner: https://cloud.google.com/spanner/docs

+
 Setup
 -------------------------------------------------------------------------------

+
 Authentication
 ++++++++++++++

@@ -28,6 +31,9 @@ credentials for applications.
 .. _Authentication Getting Started Guide:
     https://cloud.google.com/docs/authentication/getting-started

+
+
+
 Install Dependencies
 ++++++++++++++++++++

@@ -42,7 +48,7 @@ Install Dependencies
 .. _Python Development Environment Setup Guide:
     https://cloud.google.com/python/setup

-#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+#. Create a virtualenv. Samples are compatible with Python 3.6+.

     .. code-block:: bash

@@ -58,9 +64,15 @@ Install Dependencies
 .. _pip: https://pip.pypa.io/
 .. _virtualenv: https://virtualenv.pypa.io/

+
+
+
+
+
 Samples
 -------------------------------------------------------------------------------

+
 Snippets
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

@@ -76,32 +88,10 @@ To run this sample:

     $ python snippets.py

+
     usage: snippets.py [-h] [--database-id DATABASE_ID]
                        instance_id
-                       {create_database,insert_data,query_data,read_data,
-                       read_stale_data,add_column,update_data,
-                       query_data_with_new_column,read_write_transaction,
-                       read_only_transaction,add_index,query_data_with_index,
-                       read_data_with_index,add_storing_index,
-                       read_data_with_storing_index,
-                       create_table_with_timestamp,insert_data_with_timestamp,
-                       add_timestamp_column,update_data_with_timestamp,
-                       query_data_with_timestamp,write_struct_data,
-                       query_with_struct,query_with_array_of_struct,
-                       query_struct_field,query_nested_struct_field,
-                       insert_data_with_dml,update_data_with_dml,
-                       delete_data_with_dml,update_data_with_dml_timestamp,
-                       dml_write_read_transaction,update_data_with_dml_struct,
-                       insert_with_dml,query_data_with_parameter,
-                       write_with_dml_transaction,
-                       update_data_with_partitioned_dml,
-                       delete_data_with_partitioned_dml,update_with_batch_dml,
-                       create_table_with_datatypes,insert_datatypes_data,
-                       query_data_with_array,query_data_with_bool,
-                       query_data_with_bytes,query_data_with_date,
-                       query_data_with_float,query_data_with_int,
-                       query_data_with_string,
-                       query_data_with_timestamp_parameter}
+                       {create_instance,create_database,insert_data,delete_data,query_data,read_data,read_stale_data,add_column,update_data,query_data_with_new_column,read_write_transaction,read_only_transaction,add_index,query_data_with_index,read_data_with_index,add_storing_index,read_data_with_storing_index,create_table_with_timestamp,insert_data_with_timestamp,add_timestamp_column,update_data_with_timestamp,query_data_with_timestamp,write_struct_data,query_with_struct,query_with_array_of_struct,query_struct_field,query_nested_struct_field,insert_data_with_dml,update_data_with_dml,delete_data_with_dml,update_data_with_dml_timestamp,dml_write_read_transaction,update_data_with_dml_struct,insert_with_dml,query_data_with_parameter,write_with_dml_transaction,update_data_with_partitioned_dml,delete_data_with_partitioned_dml,update_with_batch_dml,create_table_with_datatypes,insert_datatypes_data,query_data_with_array,query_data_with_bool,query_data_with_bytes,query_data_with_date,query_data_with_float,query_data_with_int,query_data_with_string,query_data_with_timestamp_parameter,query_data_with_query_options,create_client_with_query_options}
                        ...

     This application demonstrates how to do basic operations using Cloud
@@ -111,32 +101,15 @@ To run this sample:

     positional arguments:
       instance_id           Your Cloud Spanner instance ID.
-      {create_database, insert_data, delete_data, query_data, read_data,
-      read_stale_data, add_column, update_data, query_data_with_new_column,
-      read_write_transaction, read_only_transaction, add_index,
-      query_data_with_index, read_data_with_index, add_storing_index,
-      read_data_with_storing_index, create_table_with_timestamp,
-      insert_data_with_timestamp, add_timestamp_column,
-      update_data_with_timestamp, query_data_with_timestamp,
-      write_struct_data, query_with_struct, query_with_array_of_struct,
-      query_struct_field, query_nested_struct_field, insert_data_with_dml,
-      update_data_with_dml, delete_data_with_dml,
-      update_data_with_dml_timestamp, dml_write_read_transaction,
-      update_data_with_dml_struct, insert_with_dml, query_data_with_parameter,
-      write_with_dml_transaction, update_data_with_partitioned_dml,
-      delete_data_with_partitioned_dml, update_with_batch_dml,
-      create_table_with_datatypes, insert_datatypes_data,
-      query_data_with_array, query_data_with_bool, query_data_with_bytes,
-      query_data_with_date, query_data_with_float, query_data_with_int,
-      query_data_with_string, query_data_with_timestamp_parameter}
+      {create_instance,create_database,insert_data,delete_data,query_data,read_data,read_stale_data,add_column,update_data,query_data_with_new_column,read_write_transaction,read_only_transaction,add_index,query_data_with_index,read_data_with_index,add_storing_index,read_data_with_storing_index,create_table_with_timestamp,insert_data_with_timestamp,add_timestamp_column,update_data_with_timestamp,query_data_with_timestamp,write_struct_data,query_with_struct,query_with_array_of_struct,query_struct_field,query_nested_struct_field,insert_data_with_dml,update_data_with_dml,delete_data_with_dml,update_data_with_dml_timestamp,dml_write_read_transaction,update_data_with_dml_struct,insert_with_dml,query_data_with_parameter,write_with_dml_transaction,update_data_with_partitioned_dml,delete_data_with_partitioned_dml,update_with_batch_dml,create_table_with_datatypes,insert_datatypes_data,query_data_with_array,query_data_with_bool,query_data_with_bytes,query_data_with_date,query_data_with_float,query_data_with_int,query_data_with_string,query_data_with_timestamp_parameter,query_data_with_query_options,create_client_with_query_options}
+    create_instance     Creates an instance.
     create_database     Creates a database and tables for sample data.
     insert_data         Inserts sample data into the given database. The
                         database and table must already exist and can be
                         created using `create_database`.
     delete_data         Deletes sample data from the given database. The
-                        database, table, and data must already exist and
-                        can be created using `create_database` and
-                        `insert_data`.
+                        database, table, and data must already exist and can
+                        be created using `create_database` and `insert_data`.
     query_data          Queries sample data from the database using SQL.
     read_data           Reads sample data from the database.
     read_stale_data     Reads sample data from the database. The data is
@@ -237,59 +210,53 @@ To run this sample:
                         Deletes sample data from the database using a DML
                         statement.
     update_data_with_dml_timestamp
-                        Updates data with Timestamp from the database using
-                        a DML statement.
+                        Updates data with Timestamp from the database using a
+                        DML statement.
     dml_write_read_transaction
                         First inserts data then reads it from within a
                         transaction using DML.
     update_data_with_dml_struct
                         Updates data with a DML statement and STRUCT
                         parameters.
-    insert_with_dml     Inserts data with a DML statement into the
-                        database.
+    insert_with_dml     Inserts data with a DML statement into the database.
     query_data_with_parameter
-                        Queries sample data from the database using SQL
-                        with a parameter.
+                        Queries sample data from the database using SQL with a
+                        parameter.
     write_with_dml_transaction
-                        Transfers part of a marketing budget from one
-                        album to another.
+                        Transfers part of a marketing budget from one album to
+                        another.
     update_data_with_partitioned_dml
-                        Update sample data with a partitioned DML
-                        statement.
+                        Update sample data with a partitioned DML statement.
     delete_data_with_partitioned_dml
-                        Delete sample data with a partitioned DML
-                        statement.
+                        Delete sample data with a partitioned DML statement.
     update_with_batch_dml
-                        Updates sample data in the database using Batch
-                        DML.
+                        Updates sample data in the database using Batch DML.
     create_table_with_datatypes
                         Creates a table with supported datatypes.
     insert_datatypes_data
                         Inserts data with supported datatypes into a table.
     query_data_with_array
-                        Queries sample data using SQL with an ARRAY
-                        parameter.
+                        Queries sample data using SQL with an ARRAY parameter.
     query_data_with_bool
-                        Queries sample data using SQL with a BOOL
-                        parameter.
+                        Queries sample data using SQL with a BOOL parameter.
     query_data_with_bytes
-                        Queries sample data using SQL with a BYTES
-                        parameter.
+                        Queries sample data using SQL with a BYTES parameter.
     query_data_with_date
-                        Queries sample data using SQL with a DATE
-                        parameter.
+                        Queries sample data using SQL with a DATE parameter.
     query_data_with_float
                         Queries sample data using SQL with a FLOAT64
                         parameter.
     query_data_with_int
-                        Queries sample data using SQL with a INT64
-                        parameter.
+                        Queries sample data using SQL with an INT64 parameter.
     query_data_with_string
-                        Queries sample data using SQL with a STRING
-                        parameter.
+                        Queries sample data using SQL with a STRING parameter.
     query_data_with_timestamp_parameter
                         Queries sample data using SQL with a TIMESTAMP
                         parameter.
+    query_data_with_query_options
+                        Queries sample data using SQL with query options.
+    create_client_with_query_options
+                        Create a client with query options.

     optional arguments:
       -h, --help            show this help message and exit
@@ -300,6 +267,10 @@ To run this sample:

+
+
+
+
 The client library
 -------------------------------------------------------------------------------
@@ -315,4 +286,5 @@ to `browse the source`_ and `report issues`_.
     https://github.com/GoogleCloudPlatform/google-cloud-python/issues

-.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
diff --git a/samples/samples/backup_sample.py b/samples/samples/backup_sample.py
index e19bd797f4..19b758d560 100644
--- a/samples/samples/backup_sample.py
+++ b/samples/samples/backup_sample.py
@@ -19,10 +19,7 @@
 """

 import argparse
-from datetime import (
-    datetime,
-    timedelta
-)
+from datetime import datetime, timedelta
 import time

 from google.cloud import spanner
@@ -37,8 +34,7 @@ def create_backup(instance_id, database_id, backup_id):

     # Create a backup
     expire_time = datetime.utcnow() + timedelta(days=14)
-    backup = instance.backup(
-        backup_id, database=database, expire_time=expire_time)
+    backup = instance.backup(backup_id, database=database, expire_time=expire_time)
     operation = backup.create()

     # Wait for backup operation to complete.
@@ -50,8 +46,13 @@ def create_backup(instance_id, database_id, backup_id):

     # Get the name, create time and backup size.
     backup.reload()
-    print("Backup {} of size {} bytes was created at {}".format(
-        backup.name, backup.size_bytes, backup.create_time))
+    print(
+        "Backup {} of size {} bytes was created at {}".format(
+            backup.name, backup.size_bytes, backup.create_time
+        )
+    )
+
+
 # [END spanner_create_backup]
@@ -73,10 +74,15 @@ def restore_database(instance_id, new_database_id, backup_id):
     # Newly created database has restore information.
     new_database.reload()
     restore_info = new_database.restore_info
-    print("Database {} restored to {} from backup {}.".format(
-        restore_info.backup_info.source_database,
-        new_database_id,
-        restore_info.backup_info.backup))
+    print(
+        "Database {} restored to {} from backup {}.".format(
+            restore_info.backup_info.source_database,
+            new_database_id,
+            restore_info.backup_info.backup,
+        )
+    )
+
+
 # [END spanner_restore_database]
@@ -89,8 +95,7 @@ def cancel_backup(instance_id, database_id, backup_id):
     expire_time = datetime.utcnow() + timedelta(days=30)

     # Create a backup.
-    backup = instance.backup(
-        backup_id, database=database, expire_time=expire_time)
+    backup = instance.backup(backup_id, database=database, expire_time=expire_time)
     operation = backup.create()

     # Cancel backup creation.
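The hunk above ends at the `# Cancel backup creation.` context line; the cancellation call itself sits in the lines elided between this hunk and the next. For orientation only, a minimal sketch of the cancel-then-verify pattern the sample follows, assuming the long-running `Operation` interface from `google.api_core` (these lines are illustrative and are not part of the diff):

    # Illustrative sketch, not diff content: cancel the backup LRO and let it
    # settle before checking whether creation won the race.
    from google.api_core import exceptions

    operation.cancel()  # request cancellation of the long-running operation
    try:
        operation.result(300)  # wait for the operation to reach a final state
    except exceptions.GoogleAPICallError:
        pass  # a cancelled operation typically surfaces here as an error
    # The next hunk then deletes the backup if it was created anyway, or
    # reports that the cancellation succeeded.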
@@ -108,6 +113,8 @@ def cancel_backup(instance_id, database_id, backup_id):
             print("Backup deleted.")
     else:
         print("Backup creation was successfully cancelled.")
+
+
 # [END spanner_cancel_backup]
@@ -125,9 +132,13 @@ def list_backup_operations(instance_id, database_id):
     operations = instance.list_backup_operations(filter_=filter_)
     for op in operations:
         metadata = op.metadata
-        print("Backup {} on database {}: {}% complete.".format(
-            metadata.name, metadata.database,
-            metadata.progress.progress_percent))
+        print(
+            "Backup {} on database {}: {}% complete.".format(
+                metadata.name, metadata.database, metadata.progress.progress_percent
+            )
+        )
+
+
 # [END spanner_list_backup_operations]
@@ -143,8 +154,13 @@ def list_database_operations(instance_id):
     )
     operations = instance.list_database_operations(filter_=filter_)
     for op in operations:
-        print("Database {} restored from backup is {}% optimized.".format(
-            op.metadata.name, op.metadata.progress.progress_percent))
+        print(
+            "Database {} restored from backup is {}% optimized.".format(
+                op.metadata.name, op.metadata.progress.progress_percent
+            )
+        )
+
+
 # [END spanner_list_database_operations]
@@ -159,22 +175,25 @@ def list_backups(instance_id, database_id, backup_id):
         print(backup.name)

     # List all backups that contain a name.
-    print("All backups with backup name containing \"{}\":".format(backup_id))
+    print('All backups with backup name containing "{}":'.format(backup_id))
     for backup in instance.list_backups(filter_="name:{}".format(backup_id)):
         print(backup.name)

     # List all backups for a database that contains a name.
-    print("All backups with database name containing \"{}\":".format(database_id))
+    print('All backups with database name containing "{}":'.format(database_id))
     for backup in instance.list_backups(filter_="database:{}".format(database_id)):
         print(backup.name)

     # List all backups that expire before a timestamp.
     expire_time = datetime.utcnow().replace(microsecond=0) + timedelta(days=30)
-    print("All backups with expire_time before \"{}-{}-{}T{}:{}:{}Z\":".format(
-        *expire_time.timetuple()))
+    print(
+        'All backups with expire_time before "{}-{}-{}T{}:{}:{}Z":'.format(
+            *expire_time.timetuple()
+        )
+    )
     for backup in instance.list_backups(
-            filter_="expire_time < \"{}-{}-{}T{}:{}:{}Z\"".format(
-                *expire_time.timetuple())):
+        filter_='expire_time < "{}-{}-{}T{}:{}:{}Z"'.format(*expire_time.timetuple())
+    ):
         print(backup.name)

     # List all backups with a size greater than some bytes.
@@ -184,17 +203,24 @@ def list_backups(instance_id, database_id, backup_id):

     # List backups that were created after a timestamp that are also ready.
     create_time = datetime.utcnow().replace(microsecond=0) - timedelta(days=1)
-    print("All backups created after \"{}-{}-{}T{}:{}:{}Z\" and are READY:".format(
-        *create_time.timetuple()))
+    print(
+        'All backups created after "{}-{}-{}T{}:{}:{}Z" and are READY:'.format(
+            *create_time.timetuple()
+        )
+    )
     for backup in instance.list_backups(
-            filter_="create_time >= \"{}-{}-{}T{}:{}:{}Z\" AND state:READY".format(
-                *create_time.timetuple())):
+        filter_='create_time >= "{}-{}-{}T{}:{}:{}Z" AND state:READY'.format(
+            *create_time.timetuple()
+        )
+    ):
         print(backup.name)

     print("All backups with pagination")
     for page in instance.list_backups(page_size=2).pages:
         for backup in page:
             print(backup.name)
+
+
 # [END spanner_list_backups]
@@ -216,6 +242,8 @@ def delete_backup(instance_id, backup_id):

     # Verify that the backup is deleted.
     assert backup.exists() is False
     print("Backup {} has been deleted.".format(backup.name))
+
+
 # [END spanner_delete_backup]
@@ -230,52 +258,57 @@ def update_backup(instance_id, backup_id):
     old_expire_time = backup.expire_time
     new_expire_time = old_expire_time + timedelta(days=30)
     backup.update_expire_time(new_expire_time)
-    print("Backup {} expire time was updated from {} to {}.".format(
-        backup.name, old_expire_time, new_expire_time))
+    print(
+        "Backup {} expire time was updated from {} to {}.".format(
+            backup.name, old_expire_time, new_expire_time
+        )
+    )
+
+
 # [END spanner_update_backup]

-if __name__ == '__main__':  # noqa: C901
+if __name__ == "__main__":  # noqa: C901
     parser = argparse.ArgumentParser(
-        description=__doc__,
-        formatter_class=argparse.RawDescriptionHelpFormatter)
-    parser.add_argument(
-        'instance_id', help='Your Cloud Spanner instance ID.')
+        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+    parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.")
     parser.add_argument(
-        '--database-id', help='Your Cloud Spanner database ID.',
-        default='example_db')
+        "--database-id", help="Your Cloud Spanner database ID.", default="example_db"
+    )
     parser.add_argument(
-        '--backup-id', help='Your Cloud Spanner backup ID.',
-        default='example_backup')
-
-    subparsers = parser.add_subparsers(dest='command')
-    subparsers.add_parser('create_backup', help=create_backup.__doc__)
-    subparsers.add_parser('cancel_backup', help=cancel_backup.__doc__)
-    subparsers.add_parser('update_backup', help=update_backup.__doc__)
-    subparsers.add_parser('restore_database', help=restore_database.__doc__)
-    subparsers.add_parser('list_backups', help=list_backups.__doc__)
-    subparsers.add_parser('list_backup_operations', help=list_backup_operations.__doc__)
-    subparsers.add_parser('list_database_operations',
-                          help=list_database_operations.__doc__)
-    subparsers.add_parser('delete_backup', help=delete_backup.__doc__)
+        "--backup-id", help="Your Cloud Spanner backup ID.", default="example_backup"
+    )
+
+    subparsers = parser.add_subparsers(dest="command")
+    subparsers.add_parser("create_backup", help=create_backup.__doc__)
+    subparsers.add_parser("cancel_backup", help=cancel_backup.__doc__)
+    subparsers.add_parser("update_backup", help=update_backup.__doc__)
+    subparsers.add_parser("restore_database", help=restore_database.__doc__)
+    subparsers.add_parser("list_backups", help=list_backups.__doc__)
+    subparsers.add_parser("list_backup_operations", help=list_backup_operations.__doc__)
+    subparsers.add_parser(
+        "list_database_operations", help=list_database_operations.__doc__
+    )
+    subparsers.add_parser("delete_backup", help=delete_backup.__doc__)

     args = parser.parse_args()

-    if args.command == 'create_backup':
+    if args.command == "create_backup":
         create_backup(args.instance_id, args.database_id, args.backup_id)
-    elif args.command == 'cancel_backup':
+    elif args.command == "cancel_backup":
         cancel_backup(args.instance_id, args.database_id, args.backup_id)
-    elif args.command == 'update_backup':
+    elif args.command == "update_backup":
         update_backup(args.instance_id, args.backup_id)
-    elif args.command == 'restore_database':
+    elif args.command == "restore_database":
         restore_database(args.instance_id, args.database_id, args.backup_id)
-    elif args.command == 'list_backups':
+    elif args.command == "list_backups":
         list_backups(args.instance_id, args.database_id, args.backup_id)
-    elif args.command == 'list_backup_operations':
+    elif args.command == "list_backup_operations":
"list_backup_operations": list_backup_operations(args.instance_id, args.database_id) - elif args.command == 'list_database_operations': + elif args.command == "list_database_operations": list_database_operations(args.instance_id) - elif args.command == 'delete_backup': + elif args.command == "delete_backup": delete_backup(args.instance_id, args.backup_id) else: print("Command {} did not match expected commands.".format(args.command)) diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py index 51822a9b5e..5a87c39d9d 100644 --- a/samples/samples/backup_sample_test.py +++ b/samples/samples/backup_sample_test.py @@ -21,17 +21,17 @@ def unique_instance_id(): """ Creates a unique id for the database. """ - return f'test-instance-{uuid.uuid4().hex[:10]}' + return f"test-instance-{uuid.uuid4().hex[:10]}" def unique_database_id(): """ Creates a unique id for the database. """ - return f'test-db-{uuid.uuid4().hex[:10]}' + return f"test-db-{uuid.uuid4().hex[:10]}" def unique_backup_id(): """ Creates a unique id for the backup. """ - return f'test-backup-{uuid.uuid4().hex[:10]}' + return f"test-backup-{uuid.uuid4().hex[:10]}" INSTANCE_ID = unique_instance_id() @@ -40,11 +40,12 @@ def unique_backup_id(): BACKUP_ID = unique_backup_id() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def spanner_instance(): spanner_client = spanner.Client() - instance_config = '{}/instanceConfigs/{}'.format( - spanner_client.project_name, 'regional-us-central1') + instance_config = "{}/instanceConfigs/{}".format( + spanner_client.project_name, "regional-us-central1" + ) instance = spanner_client.instance(INSTANCE_ID, instance_config) op = instance.create() op.result(120) # block until completion @@ -52,7 +53,7 @@ def spanner_instance(): instance.delete() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def database(spanner_instance): """ Creates a temporary database that is removed after testing. """ db = spanner_instance.database(DATABASE_ID) @@ -105,8 +106,7 @@ def test_cancel_backup(capsys): backup_sample.cancel_backup(INSTANCE_ID, DATABASE_ID, BACKUP_ID) out, _ = capsys.readouterr() cancel_success = "Backup creation was successfully cancelled." in out - cancel_failure = ( - ("Backup was created before the cancel completed." in out) and - ("Backup deleted." in out) + cancel_failure = ("Backup was created before the cancel completed." in out) and ( + "Backup deleted." 
+    )
     assert cancel_success or cancel_failure
diff --git a/samples/samples/batch_sample.py b/samples/samples/batch_sample.py
index e54581853a..59f3c081ac 100644
--- a/samples/samples/batch_sample.py
+++ b/samples/samples/batch_sample.py
@@ -44,9 +44,9 @@ def run_batch_query(instance_id, database_id):
     # Create the batch transaction and generate partitions
     snapshot = database.batch_snapshot()
     partitions = snapshot.generate_read_batches(
-        table='Singers',
-        columns=('SingerId', 'FirstName', 'LastName',),
-        keyset=spanner.KeySet(all_=True)
+        table="Singers",
+        columns=("SingerId", "FirstName", "LastName"),
+        keyset=spanner.KeySet(all_=True),
     )

     # Create a pool of workers for the tasks
@@ -57,7 +57,7 @@ def run_batch_query(instance_id, database_id):
     for future in concurrent.futures.as_completed(futures, timeout=3600):
         finish, row_ct = future.result()
         elapsed = finish - start
-        print(u'Completed {} rows in {} seconds'.format(row_ct, elapsed))
+        print(u"Completed {} rows in {} seconds".format(row_ct, elapsed))

     # Clean up
     snapshot.close()
@@ -65,24 +65,23 @@ def run_batch_query(instance_id, database_id):

 def process(snapshot, partition):
     """Processes the requests of a query in a separate process."""
-    print('Started processing partition.')
+    print("Started processing partition.")
     row_ct = 0
     for row in snapshot.process_read_batch(partition):
-        print(u'SingerId: {}, AlbumId: {}, AlbumTitle: {}'.format(*row))
+        print(u"SingerId: {}, FirstName: {}, LastName: {}".format(*row))
         row_ct += 1
     return time.time(), row_ct
 # [END spanner_batch_client]

-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description=__doc__,
-        formatter_class=argparse.RawDescriptionHelpFormatter)
-    parser.add_argument(
-        'instance_id', help='Your Cloud Spanner instance ID.')
+        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+    parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.")
     parser.add_argument(
-        'database_id', help='Your Cloud Spanner database ID.',
-        default='example_db')
+        "database_id", help="Your Cloud Spanner database ID.", default="example_db"
+    )

     args = parser.parse_args()
diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py
new file mode 100644
index 0000000000..5660f08be4
--- /dev/null
+++ b/samples/samples/noxfile.py
@@ -0,0 +1,222 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+    # You can opt out from the test for specific Python versions.
+    "ignored_versions": ["2.7"],
+    # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+    # build specific Cloud project. You can also use your own string
+    # to use your own Cloud project.
+    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+    # A dictionary you want to inject into your test. Don't put any
+    # secrets here. These values will override predefined values.
+    "envs": {},
+}
+
+
+try:
+    # Ensure we can import noxfile_config in the project's directory.
+    sys.path.append(".")
+    from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+    print("No user noxfile_config found: detail: {}".format(e))
+    TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars():
+    """Returns a dict for pytest invocation."""
+    ret = {}
+
+    # Override the GCLOUD_PROJECT and the alias.
+    env_key = TEST_CONFIG["gcloud_project_env"]
+    # This should error out if not set.
+    ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
+
+    # Apply user supplied envs.
+    ret.update(TEST_CONFIG["envs"])
+    return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+#
+# Style Checks
+#
+
+
+def _determine_local_import_names(start_dir):
+    """Determines all import names that should be considered "local".
+
+    This is used when running the linter to ensure that import order is
+    properly checked.
+    """
+    file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+    return [
+        basename
+        for basename, extension in file_ext_pairs
+        if extension == ".py"
+        or os.path.isdir(os.path.join(start_dir, basename))
+        and basename not in ("__pycache__",)
+    ]
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+#   E203: whitespace before ':'
+#   E266: too many leading '#' for block comment
+#   E501: line too long
+#   I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+    "--show-source",
+    "--builtin=gettext",
+    "--max-complexity=20",
+    "--import-order-style=google",
+    "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+    "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+    "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session):
+    session.install("flake8", "flake8-import-order")
+
+    local_names = _determine_local_import_names(".")
+    args = FLAKE8_COMMON_ARGS + [
+        "--application-import-names",
+        ",".join(local_names),
+        ".",
+    ]
+    session.run("flake8", *args)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(session, post_install=None):
+    """Runs py.test for a particular project."""
+    if os.path.exists("requirements.txt"):
+        session.install("-r", "requirements.txt")
+
+    if os.path.exists("requirements-test.txt"):
+        session.install("-r", "requirements-test.txt")
+
+    if INSTALL_LIBRARY_FROM_SOURCE:
+        session.install("-e", _get_repo_root())
+
+    if post_install:
+        post_install(session)
+
+    session.run(
+        "pytest",
+        *(PYTEST_COMMON_ARGS + session.posargs),
+        # Pytest will return 5 when no tests are collected. This can happen
+        # on travis where slow and flaky tests are excluded.
+        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+        success_codes=[0, 5],
+        env=get_pytest_env_vars()
+    )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session):
+    """Runs py.test for a sample using the specified version of Python."""
+    if session.python in TESTED_VERSIONS:
+        _session_tests(session)
+    else:
+        session.skip(
+            "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+        )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root():
+    """ Returns the root folder of the project. """
+    # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+    p = Path(os.getcwd())
+    for i in range(10):
+        if p is None:
+            break
+        if Path(p / ".git").exists():
+            return str(p)
+        p = p.parent
+    raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session, path):
+    """(Re-)generates the readme for a sample."""
+    session.install("jinja2", "pyyaml")
+    dir_ = os.path.dirname(path)
+
+    if os.path.exists(os.path.join(dir_, "requirements.txt")):
+        session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+    in_file = os.path.join(dir_, "README.rst.in")
+    session.run(
+        "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+    )
diff --git a/samples/samples/quickstart.py b/samples/samples/quickstart.py
index 75125839d2..f19c5f48b2 100644
--- a/samples/samples/quickstart.py
+++ b/samples/samples/quickstart.py
@@ -24,25 +24,25 @@ def run_quickstart():
     spanner_client = spanner.Client()

     # Your Cloud Spanner instance ID.
-    instance_id = 'my-instance-id'
+    instance_id = "my-instance-id"

     # Get a Cloud Spanner instance by ID.
     instance = spanner_client.instance(instance_id)

     # Your Cloud Spanner database ID.
-    database_id = 'my-database-id'
+    database_id = "my-database-id"

     # Get a Cloud Spanner database by ID.
     database = instance.database(database_id)

     # Execute a simple SQL statement.
     with database.snapshot() as snapshot:
-        results = snapshot.execute_sql('SELECT 1')
+        results = snapshot.execute_sql("SELECT 1")

         for row in results:
             print(row)
 # [END spanner_quickstart]

-if __name__ == '__main__':
+if __name__ == "__main__":
     run_quickstart()
diff --git a/samples/samples/quickstart_test.py b/samples/samples/quickstart_test.py
index de20872503..d5c8d04160 100644
--- a/samples/samples/quickstart_test.py
+++ b/samples/samples/quickstart_test.py
@@ -20,7 +20,7 @@
 import quickstart

-SPANNER_INSTANCE = os.environ['SPANNER_INSTANCE']
+SPANNER_INSTANCE = os.environ["SPANNER_INSTANCE"]

 @pytest.fixture
@@ -31,9 +31,8 @@ def new_instance(self, unused_instance_name):
         return original_instance(self, SPANNER_INSTANCE)

     instance_patch = mock.patch(
-        'google.cloud.spanner.Client.instance',
-        side_effect=new_instance,
-        autospec=True)
+        "google.cloud.spanner.Client.instance", side_effect=new_instance, autospec=True
+    )
     with instance_patch:
         yield
@@ -43,7 +42,7 @@ def new_instance(self, unused_instance_name):
 def example_database():
     spanner_client = spanner.Client()
     instance = spanner_client.instance(SPANNER_INSTANCE)
-    database = instance.database('my-database-id')
+    database = instance.database("my-database-id")

     if not database.exists():
         database.create()
@@ -54,4 +53,4 @@ def example_database():
 def test_quickstart(capsys, patch_instance, example_database):
     quickstart.run_quickstart()
     out, _ = capsys.readouterr()
-    assert '[1]' in out
+    assert "[1]" in out
diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
index 9c0eb3d64b..1a2c8d60e6 100644
--- a/samples/samples/snippets.py
+++ b/samples/samples/snippets.py
@@ -46,10 +46,12 @@ def create_instance(instance_id):

     operation = instance.create()

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Created instance {}'.format(instance_id))
+    print("Created instance {}".format(instance_id))
+
+
 # [END spanner_create_instance]
@@ -59,28 +61,32 @@ def create_database(instance_id, database_id):
     spanner_client = spanner.Client()
     instance = spanner_client.instance(instance_id)

-    database = instance.database(database_id, ddl_statements=[
-        """CREATE TABLE Singers (
+    database = instance.database(
+        database_id,
+        ddl_statements=[
+            """CREATE TABLE Singers (
     SingerId     INT64 NOT NULL,
     FirstName    STRING(1024),
     LastName     STRING(1024),
     SingerInfo   BYTES(MAX)
 ) PRIMARY KEY (SingerId)""",
-        """CREATE TABLE Albums (
+            """CREATE TABLE Albums (
     SingerId     INT64 NOT NULL,
     AlbumId      INT64 NOT NULL,
     AlbumTitle   STRING(MAX)
 ) PRIMARY KEY (SingerId, AlbumId),
-    INTERLEAVE IN PARENT Singers ON DELETE CASCADE"""
-    ])
+    INTERLEAVE IN PARENT Singers ON DELETE CASCADE""",
+        ],
+    )

     operation = database.create()

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Created database {} on instance {}'.format(
-        database_id, instance_id))
+    print("Created database {} on instance {}".format(database_id, instance_id))
+
+
 # [END spanner_create_database]
@@ -97,26 +103,32 @@ def insert_data(instance_id, database_id):

     with database.batch() as batch:
         batch.insert(
-            table='Singers',
-            columns=('SingerId', 'FirstName', 'LastName',),
+            table="Singers",
+            columns=("SingerId", "FirstName", "LastName"),
             values=[
-                (1, u'Marc', u'Richards'),
-                (2, u'Catalina', u'Smith'),
-                (3, u'Alice', u'Trentor'),
-                (4, u'Lea', u'Martin'),
-                (5, u'David', u'Lomond')])
(1, u"Marc", u"Richards"), + (2, u"Catalina", u"Smith"), + (3, u"Alice", u"Trentor"), + (4, u"Lea", u"Martin"), + (5, u"David", u"Lomond"), + ], + ) batch.insert( - table='Albums', - columns=('SingerId', 'AlbumId', 'AlbumTitle',), + table="Albums", + columns=("SingerId", "AlbumId", "AlbumTitle"), values=[ - (1, 1, u'Total Junk'), - (1, 2, u'Go, Go, Go'), - (2, 1, u'Green'), - (2, 2, u'Forever Hold Your Peace'), - (2, 3, u'Terrified')]) + (1, 1, u"Total Junk"), + (1, 2, u"Go, Go, Go"), + (2, 1, u"Green"), + (2, 2, u"Forever Hold Your Peace"), + (2, 3, u"Terrified"), + ], + ) + + print("Inserted data.") + - print('Inserted data.') # [END spanner_insert_data] @@ -132,24 +144,24 @@ def delete_data(instance_id, database_id): database = instance.database(database_id) # Delete individual rows - albums_to_delete = spanner.KeySet( - keys=[[2, 1], [2, 3]]) + albums_to_delete = spanner.KeySet(keys=[[2, 1], [2, 3]]) # Delete a range of rows where the column key is >=3 and <5 singers_range = spanner.KeyRange(start_closed=[3], end_open=[5]) - singers_to_delete = spanner.KeySet( - ranges=[singers_range]) + singers_to_delete = spanner.KeySet(ranges=[singers_range]) # Delete remaining Singers rows, which will also delete the remaining # Albums rows because Albums was defined with ON DELETE CASCADE remaining_singers = spanner.KeySet(all_=True) with database.batch() as batch: - batch.delete('Albums', albums_to_delete) - batch.delete('Singers', singers_to_delete) - batch.delete('Singers', remaining_singers) + batch.delete("Albums", albums_to_delete) + batch.delete("Singers", singers_to_delete) + batch.delete("Singers", remaining_singers) + + print("Deleted data.") + - print('Deleted data.') # [END spanner_delete_data] @@ -162,10 +174,13 @@ def query_data(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT SingerId, AlbumId, AlbumTitle FROM Albums') + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + ) for row in results: - print(u'SingerId: {}, AlbumId: {}, AlbumTitle: {}'.format(*row)) + print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + + # [END spanner_query_data] @@ -179,12 +194,13 @@ def read_data(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table='Albums', - columns=('SingerId', 'AlbumId', 'AlbumTitle',), - keyset=keyset,) + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + ) for row in results: - print(u'SingerId: {}, AlbumId: {}, AlbumTitle: {}'.format(*row)) + print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) + + # [END spanner_read_data] @@ -202,13 +218,15 @@ def read_stale_data(instance_id, database_id): with database.snapshot(exact_staleness=staleness) as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table='Albums', - columns=('SingerId', 'AlbumId', 'MarketingBudget',), - keyset=keyset) + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + keyset=keyset, + ) for row in results: - print(u'SingerId: {}, AlbumId: {}, MarketingBudget: {}'.format( - *row)) + print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row)) + + # [END spanner_read_stale_data] @@ -228,11 +246,13 @@ def query_data_with_new_column(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT SingerId, AlbumId, MarketingBudget FROM Albums') + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" + ) for row in results: - print( - 
-                u'SingerId: {}, AlbumId: {}, MarketingBudget: {}'.format(*row))
+            print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))
+
+
 # [END spanner_query_data_with_new_column]
@@ -243,19 +263,23 @@ def add_index(instance_id, database_id):
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    operation = database.update_ddl([
-        'CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)'])
+    operation = database.update_ddl(
+        ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"]
+    )

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Added the AlbumsByAlbumTitle index.')
+    print("Added the AlbumsByAlbumTitle index.")
+
+
 # [END spanner_create_index]

 # [START spanner_query_data_with_index]
 def query_data_with_index(
-        instance_id, database_id, start_title='Aardvark', end_title='Goo'):
+    instance_id, database_id, start_title="Aardvark", end_title="Goo"
+):
     """Queries sample data from the database using SQL and an index.

     The index must exist before running this sample. You can add the index
@@ -277,13 +301,10 @@ def query_data_with_index(
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    params = {
-        'start_title': start_title,
-        'end_title': end_title
-    }
+    params = {"start_title": start_title, "end_title": end_title}
     param_types = {
-        'start_title': type_pb2.Type(code=type_pb2.STRING),
-        'end_title': type_pb2.Type(code=type_pb2.STRING)
+        "start_title": type_pb2.Type(code=type_pb2.STRING),
+        "end_title": type_pb2.Type(code=type_pb2.STRING),
     }

     with database.snapshot() as snapshot:
@@ -291,12 +312,14 @@ def query_data_with_index(
             "SELECT AlbumId, AlbumTitle, MarketingBudget "
             "FROM Albums@{FORCE_INDEX=AlbumsByAlbumTitle} "
             "WHERE AlbumTitle >= @start_title AND AlbumTitle < @end_title",
-            params=params, param_types=param_types)
+            params=params,
+            param_types=param_types,
+        )

         for row in results:
-            print(
-                u'AlbumId: {}, AlbumTitle: {}, '
-                'MarketingBudget: {}'.format(*row))
+            print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row))
+
+
 # [END spanner_query_data_with_index]
@@ -318,13 +341,16 @@ def read_data_with_index(instance_id, database_id):
     with database.snapshot() as snapshot:
         keyset = spanner.KeySet(all_=True)
         results = snapshot.read(
-            table='Albums',
-            columns=('AlbumId', 'AlbumTitle'),
+            table="Albums",
+            columns=("AlbumId", "AlbumTitle"),
             keyset=keyset,
-            index='AlbumsByAlbumTitle')
+            index="AlbumsByAlbumTitle",
+        )

         for row in results:
-            print('AlbumId: {}, AlbumTitle: {}'.format(*row))
+            print("AlbumId: {}, AlbumTitle: {}".format(*row))
+
+
 # [END spanner_read_data_with_index]
@@ -335,14 +361,19 @@ def add_storing_index(instance_id, database_id):
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    operation = database.update_ddl([
-        'CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)'
-        'STORING (MarketingBudget)'])
+    operation = database.update_ddl(
+        [
+            "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)"
+            "STORING (MarketingBudget)"
+        ]
+    )

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Added the AlbumsByAlbumTitle2 index.')
+    print("Added the AlbumsByAlbumTitle2 index.")
+
+
 # [END spanner_create_storing_index]
@@ -366,15 +397,16 @@ def read_data_with_storing_index(instance_id, database_id):
     with database.snapshot() as snapshot:
         keyset = spanner.KeySet(all_=True)
         results = snapshot.read(
-            table='Albums',
-            columns=('AlbumId', 'AlbumTitle', 'MarketingBudget'),
+            table="Albums",
+            columns=("AlbumId", "AlbumTitle", "MarketingBudget"),
             keyset=keyset,
-            index='AlbumsByAlbumTitle2')
+            index="AlbumsByAlbumTitle2",
+        )

         for row in results:
-            print(
-                u'AlbumId: {}, AlbumTitle: {}, '
-                'MarketingBudget: {}'.format(*row))
+            print(u"AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row))
+
+
 # [END spanner_read_data_with_storing_index]
@@ -385,13 +417,16 @@ def add_column(instance_id, database_id):
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    operation = database.update_ddl([
-        'ALTER TABLE Albums ADD COLUMN MarketingBudget INT64'])
+    operation = database.update_ddl(
+        ["ALTER TABLE Albums ADD COLUMN MarketingBudget INT64"]
+    )

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Added the MarketingBudget column.')
+    print("Added the MarketingBudget column.")
+
+
 # [END spanner_add_column]
@@ -412,14 +447,14 @@ def update_data(instance_id, database_id):

     with database.batch() as batch:
         batch.update(
-            table='Albums',
-            columns=(
-                'SingerId', 'AlbumId', 'MarketingBudget'),
-            values=[
-                (1, 1, 100000),
-                (2, 2, 500000)])
+            table="Albums",
+            columns=("SingerId", "AlbumId", "MarketingBudget"),
+            values=[(1, 1, 100000), (2, 2, 500000)],
+        )
+
+    print("Updated data.")
+
-    print('Updated data.')
 # [END spanner_update_data]
@@ -443,8 +478,11 @@ def update_albums(transaction):
         # Read the second album budget.
         second_album_keyset = spanner.KeySet(keys=[(2, 2)])
         second_album_result = transaction.read(
-            table='Albums', columns=('MarketingBudget',),
-            keyset=second_album_keyset, limit=1)
+            table="Albums",
+            columns=("MarketingBudget",),
+            keyset=second_album_keyset,
+            limit=1,
+        )
         second_album_row = list(second_album_result)[0]
         second_album_budget = second_album_row[0]
@@ -453,14 +491,16 @@ def update_albums(transaction):
         if second_album_budget < transfer_amount:
             # Raising an exception will automatically roll back the
             # transaction.
-            raise ValueError(
-                'The second album doesn\'t have enough funds to transfer')
+            raise ValueError("The second album doesn't have enough funds to transfer")

         # Read the first album's budget.
         first_album_keyset = spanner.KeySet(keys=[(1, 1)])
         first_album_result = transaction.read(
-            table='Albums', columns=('MarketingBudget',),
-            keyset=first_album_keyset, limit=1)
+            table="Albums",
+            columns=("MarketingBudget",),
+            keyset=first_album_keyset,
+            limit=1,
+        )
         first_album_row = list(first_album_result)[0]
         first_album_budget = first_album_row[0]
@@ -468,22 +508,22 @@ def update_albums(transaction):
         second_album_budget -= transfer_amount
         first_album_budget += transfer_amount
         print(
-            'Setting first album\'s budget to {} and the second album\'s '
-            'budget to {}.'.format(
-                first_album_budget, second_album_budget))
+            "Setting first album's budget to {} and the second album's "
+            "budget to {}.".format(first_album_budget, second_album_budget)
+        )

         # Update the rows.
         transaction.update(
-            table='Albums',
-            columns=(
-                'SingerId', 'AlbumId', 'MarketingBudget'),
-            values=[
-                (1, 1, first_album_budget),
-                (2, 2, second_album_budget)])
+            table="Albums",
+            columns=("SingerId", "AlbumId", "MarketingBudget"),
+            values=[(1, 1, first_album_budget), (2, 2, second_album_budget)],
+        )

     database.run_in_transaction(update_albums)

-    print('Transaction complete.')
+    print("Transaction complete.")
+
+
 # [END spanner_read_write_transaction]
@@ -501,24 +541,26 @@ def read_only_transaction(instance_id, database_id):
     with database.snapshot(multi_use=True) as snapshot:
         # Read using SQL.
         results = snapshot.execute_sql(
-            'SELECT SingerId, AlbumId, AlbumTitle FROM Albums')
+            "SELECT SingerId, AlbumId, AlbumTitle FROM Albums"
+        )

-        print('Results from first read:')
+        print("Results from first read:")
         for row in results:
-            print(u'SingerId: {}, AlbumId: {}, AlbumTitle: {}'.format(*row))
+            print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))

         # Perform another read using the `read` method. Even if the data
         # is updated in-between the reads, the snapshot ensures that both
         # return the same data.
         keyset = spanner.KeySet(all_=True)
         results = snapshot.read(
-            table='Albums',
-            columns=('SingerId', 'AlbumId', 'AlbumTitle',),
-            keyset=keyset,)
+            table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset
+        )

-        print('Results from second read:')
+        print("Results from second read:")
         for row in results:
-            print(u'SingerId: {}, AlbumId: {}, AlbumTitle: {}'.format(*row))
+            print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row))
+
+
 # [END spanner_read_only_transaction]
@@ -530,8 +572,9 @@ def create_table_with_timestamp(instance_id, database_id):
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    operation = database.update_ddl([
-        """CREATE TABLE Performances (
+    operation = database.update_ddl(
+        [
+            """CREATE TABLE Performances (
             SingerId     INT64 NOT NULL,
             VenueId      INT64 NOT NULL,
             EventDate    Date,
@@ -540,13 +583,19 @@ def create_table_with_timestamp(instance_id, database_id):
             OPTIONS(allow_commit_timestamp=true)
         ) PRIMARY KEY (SingerId, VenueId, EventDate),
           INTERLEAVE IN PARENT Singers ON DELETE CASCADE"""
-    ])
+        ]
+    )

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Created Performances table on database {} on instance {}'.format(
-        database_id, instance_id))
+    print(
+        "Created Performances table on database {} on instance {}".format(
+            database_id, instance_id
+        )
+    )
+
+
 # [END spanner_create_table_with_timestamp_column]
@@ -561,16 +610,18 @@ def insert_data_with_timestamp(instance_id, database_id):

     with database.batch() as batch:
         batch.insert(
-            table='Performances',
-            columns=(
-                'SingerId', 'VenueId', 'EventDate',
-                'Revenue', 'LastUpdateTime',),
+            table="Performances",
+            columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"),
             values=[
                 (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP),
                 (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP),
-                (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP)])
+                (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP),
+            ],
+        )
+
+    print("Inserted data.")
+
-    print('Inserted data.')
 # [END spanner_insert_data_with_timestamp_column]
@@ -583,15 +634,23 @@ def add_timestamp_column(instance_id, database_id):

     database = instance.database(database_id)

-    operation = database.update_ddl([
-        'ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP '
-        'OPTIONS(allow_commit_timestamp=true)'])
+    operation = database.update_ddl(
+        [
+            "ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP "
+            "OPTIONS(allow_commit_timestamp=true)"
+        ]
+    )

-    print('Waiting for operation to complete...')
+    print("Waiting for operation to complete...")
     operation.result(120)

-    print('Altered table "Albums" on database {} on instance {}.'.format(
-        database_id, instance_id))
+    print(
+        'Altered table "Albums" on database {} on instance {}.'.format(
+            database_id, instance_id
+        )
+    )
+
+
 # [END spanner_add_timestamp_column]
@@ -619,14 +678,17 @@ def update_data_with_timestamp(instance_id, database_id):

     with database.batch() as batch:
         batch.update(
-            table='Albums',
-            columns=(
-                'SingerId', 'AlbumId', 'MarketingBudget', 'LastUpdateTime'),
+            table="Albums",
+            columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"),
             values=[
                 (1, 1, 1000000, spanner.COMMIT_TIMESTAMP),
-                (2, 2, 750000, spanner.COMMIT_TIMESTAMP)])
+                (2, 2, 750000, spanner.COMMIT_TIMESTAMP),
+            ],
+        )
+
+    print("Updated data.")
+
-    print('Updated data.')
 # [END spanner_update_data_with_timestamp_column]
@@ -650,11 +712,14 @@ def query_data_with_timestamp(instance_id, database_id):

     with database.snapshot() as snapshot:
         results = snapshot.execute_sql(
-            'SELECT SingerId, AlbumId, MarketingBudget FROM Albums '
-            'ORDER BY LastUpdateTime DESC')
+            "SELECT SingerId, AlbumId, MarketingBudget FROM Albums "
+            "ORDER BY LastUpdateTime DESC"
+        )

     for row in results:
-        print(u'SingerId: {}, AlbumId: {}, MarketingBudget: {}'.format(*row))
+        print(u"SingerId: {}, AlbumId: {}, MarketingBudget: {}".format(*row))
+
+
 # [END spanner_query_data_with_timestamp_column]
@@ -669,26 +734,32 @@ def write_struct_data(instance_id, database_id):

     with database.batch() as batch:
         batch.insert(
-            table='Singers',
-            columns=('SingerId', 'FirstName', 'LastName',),
+            table="Singers",
+            columns=("SingerId", "FirstName", "LastName"),
             values=[
-                (6, u'Elena', u'Campbell'),
-                (7, u'Gabriel', u'Wright'),
-                (8, u'Benjamin', u'Martinez'),
-                (9, u'Hannah', u'Harris')])
+                (6, u"Elena", u"Campbell"),
+                (7, u"Gabriel", u"Wright"),
+                (8, u"Benjamin", u"Martinez"),
+                (9, u"Hannah", u"Harris"),
+            ],
+        )
+
+    print("Inserted sample data for STRUCT queries")
+
-    print('Inserted sample data for STRUCT queries')
 # [END spanner_write_data_for_struct_queries]

 def query_with_struct(instance_id, database_id):
     """Query a table using STRUCT parameters. """
""" # [START spanner_create_struct_with_data] - record_type = param_types.Struct([ - param_types.StructField('FirstName', param_types.STRING), - param_types.StructField('LastName', param_types.STRING) - ]) - record_value = ('Elena', 'Campbell') + record_type = param_types.Struct( + [ + param_types.StructField("FirstName", param_types.STRING), + param_types.StructField("LastName", param_types.STRING), + ] + ) + record_value = ("Elena", "Campbell") # [END spanner_create_struct_with_data] # [START spanner_query_data_with_struct] @@ -699,28 +770,33 @@ def query_with_struct(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId FROM Singers WHERE " - "(FirstName, LastName) = @name", - params={'name': record_value}, - param_types={'name': record_type}) + "SELECT SingerId FROM Singers WHERE " "(FirstName, LastName) = @name", + params={"name": record_value}, + param_types={"name": record_type}, + ) for row in results: - print(u'SingerId: {}'.format(*row)) + print(u"SingerId: {}".format(*row)) # [END spanner_query_data_with_struct] def query_with_array_of_struct(instance_id, database_id): """Query a table using an array of STRUCT parameters. """ # [START spanner_create_user_defined_struct] - name_type = param_types.Struct([ - param_types.StructField('FirstName', param_types.STRING), - param_types.StructField('LastName', param_types.STRING)]) + name_type = param_types.Struct( + [ + param_types.StructField("FirstName", param_types.STRING), + param_types.StructField("LastName", param_types.STRING), + ] + ) # [END spanner_create_user_defined_struct] # [START spanner_create_array_of_struct_with_data] - band_members = [("Elena", "Campbell"), - ("Gabriel", "Wright"), - ("Benjamin", "Martinez")] + band_members = [ + ("Elena", "Campbell"), + ("Gabriel", "Wright"), + ("Benjamin", "Martinez"), + ] # [END spanner_create_array_of_struct_with_data] # [START spanner_query_data_with_array_of_struct] @@ -733,11 +809,12 @@ def query_with_array_of_struct(instance_id, database_id): "SELECT SingerId FROM Singers WHERE " "STRUCT" "(FirstName, LastName) IN UNNEST(@names)", - params={'names': band_members}, - param_types={'names': param_types.Array(name_type)}) + params={"names": band_members}, + param_types={"names": param_types.Array(name_type)}, + ) for row in results: - print(u'SingerId: {}'.format(*row)) + print(u"SingerId: {}".format(*row)) # [END spanner_query_data_with_array_of_struct] @@ -748,20 +825,24 @@ def query_struct_field(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - name_type = param_types.Struct([ - param_types.StructField('FirstName', param_types.STRING), - param_types.StructField('LastName', param_types.STRING) - ]) + name_type = param_types.Struct( + [ + param_types.StructField("FirstName", param_types.STRING), + param_types.StructField("LastName", param_types.STRING), + ] + ) with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId FROM Singers " - "WHERE FirstName = @name.FirstName", - params={'name': ("Elena", "Campbell")}, - param_types={'name': name_type}) + "SELECT SingerId FROM Singers " "WHERE FirstName = @name.FirstName", + params={"name": ("Elena", "Campbell")}, + param_types={"name": name_type}, + ) for row in results: - print(u'SingerId: {}'.format(*row)) + print(u"SingerId: {}".format(*row)) + + # [START spanner_field_access_on_struct_parameters] @@ -772,21 +853,24 @@ def query_nested_struct_field(instance_id, database_id): 
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    song_info_type = param_types.Struct([
-        param_types.StructField('SongName', param_types.STRING),
-        param_types.StructField(
-            'ArtistNames', param_types.Array(
-                param_types.Struct([
-                    param_types.StructField(
-                        'FirstName', param_types.STRING),
-                    param_types.StructField(
-                        'LastName', param_types.STRING)
-                ])
-            )
-        )
-    ])
+    song_info_type = param_types.Struct(
+        [
+            param_types.StructField("SongName", param_types.STRING),
+            param_types.StructField(
+                "ArtistNames",
+                param_types.Array(
+                    param_types.Struct(
+                        [
+                            param_types.StructField("FirstName", param_types.STRING),
+                            param_types.StructField("LastName", param_types.STRING),
+                        ]
+                    )
+                ),
+            ),
+        ]
+    )

-    song_info = ('Imagination', [('Elena', 'Campbell'), ('Hannah', 'Harris')])
+    song_info = ("Imagination", [("Elena", "Campbell"), ("Hannah", "Harris")])

     with database.snapshot() as snapshot:
         results = snapshot.execute_sql(
@@ -795,16 +879,14 @@ def query_nested_struct_field(instance_id, database_id):
             "STRUCT<FirstName STRING, LastName STRING>"
             "(FirstName, LastName) "
             "IN UNNEST(@song_info.ArtistNames)",
-            params={
-                'song_info': song_info
-            },
-            param_types={
-                'song_info': song_info_type
-            }
+            params={"song_info": song_info},
+            param_types={"song_info": song_info_type},
         )

     for row in results:
-        print(u'SingerId: {} SongName: {}'.format(*row))
+        print(u"SingerId: {} SongName: {}".format(*row))
+
+
 # [END spanner_field_access_on_nested_struct_parameters]
@@ -936,19 +1018,21 @@ def update_data_with_dml_struct(instance_id, database_id):
     instance = spanner_client.instance(instance_id)
     database = instance.database(database_id)

-    record_type = param_types.Struct([
-        param_types.StructField('FirstName', param_types.STRING),
-        param_types.StructField('LastName', param_types.STRING)
-    ])
-    record_value = ('Timothy', 'Campbell')
+    record_type = param_types.Struct(
+        [
+            param_types.StructField("FirstName", param_types.STRING),
+            param_types.StructField("LastName", param_types.STRING),
+        ]
+    )
+    record_value = ("Timothy", "Campbell")

     def write_with_struct(transaction):
         row_ct = transaction.execute_update(
             "UPDATE Singers SET LastName = 'Grant' "
             "WHERE STRUCT<FirstName STRING, LastName STRING>"
             "(FirstName, LastName) = @name",
-            params={'name': record_value},
-            param_types={'name': record_type}
+            params={"name": record_value},
+            param_types={"name": record_type},
         )
         print("{} record(s) updated.".format(row_ct))
@@ -993,7 +1077,8 @@ def query_data_with_parameter(instance_id, database_id):
             "SELECT SingerId, FirstName, LastName FROM Singers "
             "WHERE LastName = @lastName",
             params={"lastName": "Garcia"},
-            param_types={"lastName": spanner.param_types.STRING})
+            param_types={"lastName": spanner.param_types.STRING},
+        )

         for row in results:
             print(u"SingerId: {}, FirstName: {}, LastName: {}".format(*row))
@@ -1014,8 +1099,7 @@ def transfer_budget(transaction):
         # Transfer marketing budget from one album to another. Performed in a
         # single transaction to ensure that the transfer is atomic.
second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 2 and AlbumId = 2" + "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -1042,7 +1126,7 @@ def transfer_budget(transaction): "SET MarketingBudget = @AlbumBudget " "WHERE SingerId = 1 and AlbumId = 1", params={"AlbumBudget": first_album_budget}, - param_types={"AlbumBudget": spanner.param_types.INT64} + param_types={"AlbumBudget": spanner.param_types.INT64}, ) # Update second album @@ -1051,11 +1135,14 @@ def transfer_budget(transaction): "SET MarketingBudget = @AlbumBudget " "WHERE SingerId = 2 and AlbumId = 2", params={"AlbumBudget": second_album_budget}, - param_types={"AlbumBudget": spanner.param_types.INT64} + param_types={"AlbumBudget": spanner.param_types.INT64}, ) - print("Transferred {} from Album2's budget to Album1's".format( - transfer_amount)) + print( + "Transferred {} from Album2's budget to Album1's".format( + transfer_amount + ) + ) database.run_in_transaction(transfer_budget) # [END spanner_dml_getting_started_update] @@ -1088,9 +1175,7 @@ def delete_data_with_partitioned_dml(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - row_ct = database.execute_partitioned_dml( - "DELETE FROM Singers WHERE SingerId > 10" - ) + row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") print("{} record(s) deleted.".format(row_ct)) # [END spanner_dml_partitioned_delete] @@ -1119,13 +1204,9 @@ def update_with_batch_dml(instance_id, database_id): ) def update_albums(transaction): - row_cts = transaction.batch_update([ - insert_statement, - update_statement, - ]) + row_cts = transaction.batch_update([insert_statement, update_statement]) - print("Executed {} SQL statements using Batch DML.".format( - len(row_cts))) + print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) database.run_in_transaction(update_albums) # [END spanner_dml_batch_update] @@ -1140,8 +1221,9 @@ def create_table_with_datatypes(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - operation = database.update_ddl([ - """CREATE TABLE Venues ( + operation = database.update_ddl( + [ + """CREATE TABLE Venues ( VenueId INT64 NOT NULL, VenueName STRING(100), VenueInfo BYTES(MAX), @@ -1153,13 +1235,17 @@ def create_table_with_datatypes(instance_id, database_id): LastUpdateTime TIMESTAMP NOT NULL OPTIONS(allow_commit_timestamp=true) ) PRIMARY KEY (VenueId)""" - ]) + ] + ) - print('Waiting for operation to complete...') + print("Waiting for operation to complete...") operation.result(120) - print('Created Venues table on database {} on instance {}'.format( - database_id, instance_id)) + print( + "Created Venues table on database {} on instance {}".format( + database_id, instance_id + ) + ) # [END spanner_create_table_with_datatypes] @@ -1172,28 +1258,64 @@ def insert_datatypes_data(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - exampleBytes1 = base64.b64encode(u'Hello World 1'.encode()) - exampleBytes2 = base64.b64encode(u'Hello World 2'.encode()) - exampleBytes3 = base64.b64encode(u'Hello World 3'.encode()) - available_dates1 = ['2020-12-01', '2020-12-02', '2020-12-03'] - available_dates2 = ['2020-11-01', '2020-11-05', '2020-11-15'] - available_dates3 = 
['2020-10-01', '2020-10-07'] + exampleBytes1 = base64.b64encode(u"Hello World 1".encode()) + exampleBytes2 = base64.b64encode(u"Hello World 2".encode()) + exampleBytes3 = base64.b64encode(u"Hello World 3".encode()) + available_dates1 = ["2020-12-01", "2020-12-02", "2020-12-03"] + available_dates2 = ["2020-11-01", "2020-11-05", "2020-11-15"] + available_dates3 = ["2020-10-01", "2020-10-07"] with database.batch() as batch: batch.insert( - table='Venues', + table="Venues", columns=( - 'VenueId', 'VenueName', 'VenueInfo', 'Capacity', - 'AvailableDates', 'LastContactDate', 'OutdoorVenue', - 'PopularityScore', 'LastUpdateTime'), + "VenueId", + "VenueName", + "VenueInfo", + "Capacity", + "AvailableDates", + "LastContactDate", + "OutdoorVenue", + "PopularityScore", + "LastUpdateTime", + ), values=[ - (4, u'Venue 4', exampleBytes1, 1800, available_dates1, - '2018-09-02', False, 0.85543, spanner.COMMIT_TIMESTAMP), - (19, u'Venue 19', exampleBytes2, 6300, available_dates2, - '2019-01-15', True, 0.98716, spanner.COMMIT_TIMESTAMP), - (42, u'Venue 42', exampleBytes3, 3000, available_dates3, - '2018-10-01', False, 0.72598, spanner.COMMIT_TIMESTAMP)]) - - print('Inserted data.') + ( + 4, + u"Venue 4", + exampleBytes1, + 1800, + available_dates1, + "2018-09-02", + False, + 0.85543, + spanner.COMMIT_TIMESTAMP, + ), + ( + 19, + u"Venue 19", + exampleBytes2, + 6300, + available_dates2, + "2019-01-15", + True, + 0.98716, + spanner.COMMIT_TIMESTAMP, + ), + ( + 42, + u"Venue 42", + exampleBytes3, + 3000, + available_dates3, + "2018-10-01", + False, + 0.72598, + spanner.COMMIT_TIMESTAMP, + ), + ], + ) + + print("Inserted data.") # [END spanner_insert_datatypes_data] @@ -1206,24 +1328,21 @@ def query_data_with_array(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - exampleArray = ['2020-10-01', '2020-11-01'] - param = { - 'available_dates': exampleArray - } - param_type = { - 'available_dates': param_types.Array(param_types.DATE) - } + exampleArray = ["2020-10-01", "2020-11-01"] + param = {"available_dates": exampleArray} + param_type = {"available_dates": param_types.Array(param_types.DATE)} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, AvailableDate FROM Venues v,' - 'UNNEST(v.AvailableDates) as AvailableDate ' - 'WHERE AvailableDate in UNNEST(@available_dates)', - params=param, param_types=param_type) + "SELECT VenueId, VenueName, AvailableDate FROM Venues v," + "UNNEST(v.AvailableDates) as AvailableDate " + "WHERE AvailableDate in UNNEST(@available_dates)", + params=param, + param_types=param_type, + ) for row in results: - print(u"VenueId: {}, VenueName: {}, AvailableDate: {}".format( - *row)) + print(u"VenueId: {}, VenueName: {}, AvailableDate: {}".format(*row)) # [END spanner_query_with_array_parameter] @@ -1237,18 +1356,16 @@ def query_data_with_bool(instance_id, database_id): database = instance.database(database_id) exampleBool = True - param = { - 'outdoor_venue': exampleBool - } - param_type = { - 'outdoor_venue': param_types.BOOL - } + param = {"outdoor_venue": exampleBool} + param_type = {"outdoor_venue": param_types.BOOL} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, OutdoorVenue FROM Venues ' - 'WHERE OutdoorVenue = @outdoor_venue', - params=param, param_types=param_type) + "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " + "WHERE OutdoorVenue = @outdoor_venue", + params=param, + param_types=param_type, + ) for row in 
results: print(u"VenueId: {}, VenueName: {}, OutdoorVenue: {}".format(*row)) @@ -1264,19 +1381,16 @@ def query_data_with_bytes(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - exampleBytes = base64.b64encode(u'Hello World 1'.encode()) - param = { - 'venue_info': exampleBytes - } - param_type = { - 'venue_info': param_types.BYTES - } + exampleBytes = base64.b64encode(u"Hello World 1".encode()) + param = {"venue_info": exampleBytes} + param_type = {"venue_info": param_types.BYTES} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName FROM Venues ' - 'WHERE VenueInfo = @venue_info', - params=param, param_types=param_type) + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = @venue_info", + params=param, + param_types=param_type, + ) for row in results: print(u"VenueId: {}, VenueName: {}".format(*row)) @@ -1292,23 +1406,20 @@ def query_data_with_date(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - exampleDate = '2019-01-01' - param = { - 'last_contact_date': exampleDate - } - param_type = { - 'last_contact_date': param_types.DATE - } + exampleDate = "2019-01-01" + param = {"last_contact_date": exampleDate} + param_type = {"last_contact_date": param_types.DATE} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, LastContactDate FROM Venues ' - 'WHERE LastContactDate < @last_contact_date', - params=param, param_types=param_type) + "SELECT VenueId, VenueName, LastContactDate FROM Venues " + "WHERE LastContactDate < @last_contact_date", + params=param, + param_types=param_type, + ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastContactDate: {}".format( - *row)) + print(u"VenueId: {}, VenueName: {}, LastContactDate: {}".format(*row)) # [END spanner_query_with_date_parameter] @@ -1322,22 +1433,19 @@ def query_data_with_float(instance_id, database_id): database = instance.database(database_id) exampleFloat = 0.8 - param = { - 'popularity_score': exampleFloat - } - param_type = { - 'popularity_score': param_types.FLOAT64 - } + param = {"popularity_score": exampleFloat} + param_type = {"popularity_score": param_types.FLOAT64} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, PopularityScore FROM Venues ' - 'WHERE PopularityScore > @popularity_score', - params=param, param_types=param_type) + "SELECT VenueId, VenueName, PopularityScore FROM Venues " + "WHERE PopularityScore > @popularity_score", + params=param, + param_types=param_type, + ) for row in results: - print(u"VenueId: {}, VenueName: {}, PopularityScore: {}".format( - *row)) + print(u"VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) # [END spanner_query_with_float_parameter] @@ -1351,18 +1459,16 @@ def query_data_with_int(instance_id, database_id): database = instance.database(database_id) exampleInt = 3000 - param = { - 'capacity': exampleInt - } - param_type = { - 'capacity': param_types.INT64 - } + param = {"capacity": exampleInt} + param_type = {"capacity": param_types.INT64} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, Capacity FROM Venues ' - 'WHERE Capacity >= @capacity', - params=param, param_types=param_type) + "SELECT VenueId, VenueName, Capacity FROM Venues " + "WHERE Capacity >= @capacity", + params=param, + param_types=param_type, + ) for row in results: 
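# (execute_sql returns a streamed result set; rows are fetched from the server lazily as this loop consumes them.)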
print(u"VenueId: {}, VenueName: {}, Capacity: {}".format(*row)) @@ -1379,18 +1485,15 @@ def query_data_with_string(instance_id, database_id): database = instance.database(database_id) exampleString = "Venue 42" - param = { - 'venue_name': exampleString - } - param_type = { - 'venue_name': param_types.STRING - } + param = {"venue_name": exampleString} + param_type = {"venue_name": param_types.STRING} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName FROM Venues ' - 'WHERE VenueName = @venue_name', - params=param, param_types=param_type) + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = @venue_name", + params=param, + param_types=param_type, + ) for row in results: print(u"VenueId: {}, VenueName: {}".format(*row)) @@ -1407,22 +1510,19 @@ def query_data_with_timestamp_parameter(instance_id, database_id): database = instance.database(database_id) example_timestamp = datetime.datetime.utcnow().isoformat() + "Z" - param = { - 'last_update_time': example_timestamp - } - param_type = { - 'last_update_time': param_types.TIMESTAMP - } + param = {"last_update_time": example_timestamp} + param_type = {"last_update_time": param_types.TIMESTAMP} with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, LastUpdateTime FROM Venues ' - 'WHERE LastUpdateTime < @last_update_time', - params=param, param_types=param_type) + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " + "WHERE LastUpdateTime < @last_update_time", + params=param, + param_types=param_type, + ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format( - *row)) + print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) # [END spanner_query_with_timestamp_parameter] @@ -1437,13 +1537,12 @@ def query_data_with_query_options(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, LastUpdateTime FROM Venues', - query_options={'optimizer_version': '1'} + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", + query_options={"optimizer_version": "1"}, ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format( - *row)) + print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) # [END spanner_query_with_query_options] @@ -1452,248 +1551,229 @@ def create_client_with_query_options(instance_id, database_id): # [START spanner_create_client_with_query_options] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - spanner_client = spanner.Client( - query_options={'optimizer_version': '1'} - ) + spanner_client = spanner.Client(query_options={"optimizer_version": "1"}) instance = spanner_client.instance(instance_id) database = instance.database(database_id) with database.snapshot() as snapshot: results = snapshot.execute_sql( - 'SELECT VenueId, VenueName, LastUpdateTime FROM Venues' + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" ) for row in results: - print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format( - *row)) + print(u"VenueId: {}, VenueName: {}, LastUpdateTime: {}".format(*row)) # [END spanner_create_client_with_query_options] -if __name__ == '__main__': # noqa: C901 +if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - 'instance_id', help='Your Cloud Spanner instance ID.') + description=__doc__, 
formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") parser.add_argument( - '--database-id', help='Your Cloud Spanner database ID.', - default='example_db') - - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('create_instance', help=create_instance.__doc__) - subparsers.add_parser('create_database', help=create_database.__doc__) - subparsers.add_parser('insert_data', help=insert_data.__doc__) - subparsers.add_parser('delete_data', help=delete_data.__doc__) - subparsers.add_parser('query_data', help=query_data.__doc__) - subparsers.add_parser('read_data', help=read_data.__doc__) - subparsers.add_parser('read_stale_data', help=read_stale_data.__doc__) - subparsers.add_parser('add_column', help=add_column.__doc__) - subparsers.add_parser('update_data', help=update_data.__doc__) - subparsers.add_parser( - 'query_data_with_new_column', help=query_data_with_new_column.__doc__) - subparsers.add_parser( - 'read_write_transaction', help=read_write_transaction.__doc__) + "--database-id", help="Your Cloud Spanner database ID.", default="example_db" + ) + + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("create_instance", help=create_instance.__doc__) + subparsers.add_parser("create_database", help=create_database.__doc__) + subparsers.add_parser("insert_data", help=insert_data.__doc__) + subparsers.add_parser("delete_data", help=delete_data.__doc__) + subparsers.add_parser("query_data", help=query_data.__doc__) + subparsers.add_parser("read_data", help=read_data.__doc__) + subparsers.add_parser("read_stale_data", help=read_stale_data.__doc__) + subparsers.add_parser("add_column", help=add_column.__doc__) + subparsers.add_parser("update_data", help=update_data.__doc__) subparsers.add_parser( - 'read_only_transaction', help=read_only_transaction.__doc__) - subparsers.add_parser('add_index', help=add_index.__doc__) + "query_data_with_new_column", help=query_data_with_new_column.__doc__ + ) + subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) + subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) + subparsers.add_parser("add_index", help=add_index.__doc__) query_data_with_index_parser = subparsers.add_parser( - 'query_data_with_index', help=query_data_with_index.__doc__) - query_data_with_index_parser.add_argument( - '--start_title', default='Aardvark') - query_data_with_index_parser.add_argument( - '--end_title', default='Goo') - subparsers.add_parser('read_data_with_index', help=insert_data.__doc__) - subparsers.add_parser('add_storing_index', help=add_storing_index.__doc__) - subparsers.add_parser( - 'read_data_with_storing_index', help=insert_data.__doc__) - subparsers.add_parser( - 'create_table_with_timestamp', - help=create_table_with_timestamp.__doc__) - subparsers.add_parser( - 'insert_data_with_timestamp', help=insert_data_with_timestamp.__doc__) - subparsers.add_parser( - 'add_timestamp_column', help=add_timestamp_column.__doc__) - subparsers.add_parser( - 'update_data_with_timestamp', help=update_data_with_timestamp.__doc__) - subparsers.add_parser( - 'query_data_with_timestamp', help=query_data_with_timestamp.__doc__) - subparsers.add_parser('write_struct_data', help=write_struct_data.__doc__) - subparsers.add_parser('query_with_struct', help=query_with_struct.__doc__) - subparsers.add_parser( - 'query_with_array_of_struct', help=query_with_array_of_struct.__doc__) - subparsers.add_parser( - 
'query_struct_field', help=query_struct_field.__doc__) - subparsers.add_parser( - 'query_nested_struct_field', help=query_nested_struct_field.__doc__) - subparsers.add_parser( - 'insert_data_with_dml', help=insert_data_with_dml.__doc__) - subparsers.add_parser( - 'update_data_with_dml', help=update_data_with_dml.__doc__) - subparsers.add_parser( - 'delete_data_with_dml', help=delete_data_with_dml.__doc__) - subparsers.add_parser( - 'update_data_with_dml_timestamp', - help=update_data_with_dml_timestamp.__doc__) - subparsers.add_parser( - 'dml_write_read_transaction', - help=dml_write_read_transaction.__doc__) - subparsers.add_parser( - 'update_data_with_dml_struct', - help=update_data_with_dml_struct.__doc__) - subparsers.add_parser('insert_with_dml', help=insert_with_dml.__doc__) + "query_data_with_index", help=query_data_with_index.__doc__ + ) + query_data_with_index_parser.add_argument("--start_title", default="Aardvark") + query_data_with_index_parser.add_argument("--end_title", default="Goo") + subparsers.add_parser("read_data_with_index", help=insert_data.__doc__) + subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) + subparsers.add_parser("read_data_with_storing_index", help=insert_data.__doc__) subparsers.add_parser( - 'query_data_with_parameter', help=query_data_with_parameter.__doc__) + "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ + ) subparsers.add_parser( - 'write_with_dml_transaction', help=write_with_dml_transaction.__doc__) + "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ + ) + subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) subparsers.add_parser( - 'update_data_with_partitioned_dml', - help=update_data_with_partitioned_dml.__doc__) + "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ + ) subparsers.add_parser( - 'delete_data_with_partitioned_dml', - help=delete_data_with_partitioned_dml.__doc__) + "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ + ) + subparsers.add_parser("write_struct_data", help=write_struct_data.__doc__) + subparsers.add_parser("query_with_struct", help=query_with_struct.__doc__) subparsers.add_parser( - 'update_with_batch_dml', - help=update_with_batch_dml.__doc__) + "query_with_array_of_struct", help=query_with_array_of_struct.__doc__ + ) + subparsers.add_parser("query_struct_field", help=query_struct_field.__doc__) subparsers.add_parser( - 'create_table_with_datatypes', - help=create_table_with_datatypes.__doc__) + "query_nested_struct_field", help=query_nested_struct_field.__doc__ + ) + subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) + subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) subparsers.add_parser( - 'insert_datatypes_data', - help=insert_datatypes_data.__doc__) + "update_data_with_dml_timestamp", help=update_data_with_dml_timestamp.__doc__ + ) subparsers.add_parser( - 'query_data_with_array', - help=query_data_with_array.__doc__) + "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ + ) subparsers.add_parser( - 'query_data_with_bool', - help=query_data_with_bool.__doc__) + "update_data_with_dml_struct", help=update_data_with_dml_struct.__doc__ + ) + subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) subparsers.add_parser( - 'query_data_with_bytes', - help=query_data_with_bytes.__doc__) + "query_data_with_parameter", 
help=query_data_with_parameter.__doc__ + ) subparsers.add_parser( - 'query_data_with_date', - help=query_data_with_date.__doc__) + "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ + ) subparsers.add_parser( - 'query_data_with_float', - help=query_data_with_float.__doc__) + "update_data_with_partitioned_dml", + help=update_data_with_partitioned_dml.__doc__, + ) subparsers.add_parser( - 'query_data_with_int', - help=query_data_with_int.__doc__) + "delete_data_with_partitioned_dml", + help=delete_data_with_partitioned_dml.__doc__, + ) + subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) subparsers.add_parser( - 'query_data_with_string', - help=query_data_with_string.__doc__) + "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ + ) + subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) + subparsers.add_parser("query_data_with_array", help=query_data_with_array.__doc__) + subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) + subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) + subparsers.add_parser("query_data_with_date", help=query_data_with_date.__doc__) + subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) + subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__) + subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) subparsers.add_parser( - 'query_data_with_timestamp_parameter', - help=query_data_with_timestamp_parameter.__doc__) + "query_data_with_timestamp_parameter", + help=query_data_with_timestamp_parameter.__doc__, + ) subparsers.add_parser( - 'query_data_with_query_options', - help=query_data_with_query_options.__doc__) + "query_data_with_query_options", help=query_data_with_query_options.__doc__ + ) subparsers.add_parser( - 'create_client_with_query_options', - help=create_client_with_query_options.__doc__) + "create_client_with_query_options", + help=create_client_with_query_options.__doc__, + ) args = parser.parse_args() - if args.command == 'create_instance': + if args.command == "create_instance": create_instance(args.instance_id) - elif args.command == 'create_database': + elif args.command == "create_database": create_database(args.instance_id, args.database_id) - elif args.command == 'insert_data': + elif args.command == "insert_data": insert_data(args.instance_id, args.database_id) - elif args.command == 'delete_data': + elif args.command == "delete_data": delete_data(args.instance_id, args.database_id) - elif args.command == 'query_data': + elif args.command == "query_data": query_data(args.instance_id, args.database_id) - elif args.command == 'read_data': + elif args.command == "read_data": read_data(args.instance_id, args.database_id) - elif args.command == 'read_stale_data': + elif args.command == "read_stale_data": read_stale_data(args.instance_id, args.database_id) - elif args.command == 'add_column': + elif args.command == "add_column": add_column(args.instance_id, args.database_id) - elif args.command == 'update_data': + elif args.command == "update_data": update_data(args.instance_id, args.database_id) - elif args.command == 'query_data_with_new_column': + elif args.command == "query_data_with_new_column": query_data_with_new_column(args.instance_id, args.database_id) - elif args.command == 'read_write_transaction': + elif args.command == "read_write_transaction": read_write_transaction(args.instance_id, 
args.database_id) - elif args.command == 'read_only_transaction': + elif args.command == "read_only_transaction": read_only_transaction(args.instance_id, args.database_id) - elif args.command == 'add_index': + elif args.command == "add_index": add_index(args.instance_id, args.database_id) - elif args.command == 'query_data_with_index': + elif args.command == "query_data_with_index": query_data_with_index( - args.instance_id, args.database_id, - args.start_title, args.end_title) - elif args.command == 'read_data_with_index': + args.instance_id, args.database_id, args.start_title, args.end_title + ) + elif args.command == "read_data_with_index": read_data_with_index(args.instance_id, args.database_id) - elif args.command == 'add_storing_index': + elif args.command == "add_storing_index": add_storing_index(args.instance_id, args.database_id) - elif args.command == 'read_data_with_storing_index': + elif args.command == "read_data_with_storing_index": read_data_with_storing_index(args.instance_id, args.database_id) - elif args.command == 'create_table_with_timestamp': + elif args.command == "create_table_with_timestamp": create_table_with_timestamp(args.instance_id, args.database_id) - elif args.command == 'insert_data_with_timestamp': + elif args.command == "insert_data_with_timestamp": insert_data_with_timestamp(args.instance_id, args.database_id) - elif args.command == 'add_timestamp_column': + elif args.command == "add_timestamp_column": add_timestamp_column(args.instance_id, args.database_id) - elif args.command == 'update_data_with_timestamp': + elif args.command == "update_data_with_timestamp": update_data_with_timestamp(args.instance_id, args.database_id) - elif args.command == 'query_data_with_timestamp': + elif args.command == "query_data_with_timestamp": query_data_with_timestamp(args.instance_id, args.database_id) - elif args.command == 'write_struct_data': + elif args.command == "write_struct_data": write_struct_data(args.instance_id, args.database_id) - elif args.command == 'query_with_struct': + elif args.command == "query_with_struct": query_with_struct(args.instance_id, args.database_id) - elif args.command == 'query_with_array_of_struct': + elif args.command == "query_with_array_of_struct": query_with_array_of_struct(args.instance_id, args.database_id) - elif args.command == 'query_struct_field': + elif args.command == "query_struct_field": query_struct_field(args.instance_id, args.database_id) - elif args.command == 'query_nested_struct_field': + elif args.command == "query_nested_struct_field": query_nested_struct_field(args.instance_id, args.database_id) - elif args.command == 'insert_data_with_dml': + elif args.command == "insert_data_with_dml": insert_data_with_dml(args.instance_id, args.database_id) - elif args.command == 'update_data_with_dml': + elif args.command == "update_data_with_dml": update_data_with_dml(args.instance_id, args.database_id) - elif args.command == 'delete_data_with_dml': + elif args.command == "delete_data_with_dml": delete_data_with_dml(args.instance_id, args.database_id) - elif args.command == 'update_data_with_dml_timestamp': + elif args.command == "update_data_with_dml_timestamp": update_data_with_dml_timestamp(args.instance_id, args.database_id) - elif args.command == 'dml_write_read_transaction': + elif args.command == "dml_write_read_transaction": dml_write_read_transaction(args.instance_id, args.database_id) - elif args.command == 'update_data_with_dml_struct': + elif args.command == "update_data_with_dml_struct": 
update_data_with_dml_struct(args.instance_id, args.database_id) - elif args.command == 'insert_with_dml': + elif args.command == "insert_with_dml": insert_with_dml(args.instance_id, args.database_id) - elif args.command == 'query_data_with_parameter': + elif args.command == "query_data_with_parameter": query_data_with_parameter(args.instance_id, args.database_id) - elif args.command == 'write_with_dml_transaction': + elif args.command == "write_with_dml_transaction": write_with_dml_transaction(args.instance_id, args.database_id) - elif args.command == 'update_data_with_partitioned_dml': + elif args.command == "update_data_with_partitioned_dml": update_data_with_partitioned_dml(args.instance_id, args.database_id) - elif args.command == 'delete_data_with_partitioned_dml': + elif args.command == "delete_data_with_partitioned_dml": delete_data_with_partitioned_dml(args.instance_id, args.database_id) - elif args.command == 'update_with_batch_dml': + elif args.command == "update_with_batch_dml": update_with_batch_dml(args.instance_id, args.database_id) - elif args.command == 'create_table_with_datatypes': + elif args.command == "create_table_with_datatypes": create_table_with_datatypes(args.instance_id, args.database_id) - elif args.command == 'insert_datatypes_data': + elif args.command == "insert_datatypes_data": insert_datatypes_data(args.instance_id, args.database_id) - elif args.command == 'query_data_with_array': + elif args.command == "query_data_with_array": query_data_with_array(args.instance_id, args.database_id) - elif args.command == 'query_data_with_bool': + elif args.command == "query_data_with_bool": query_data_with_bool(args.instance_id, args.database_id) - elif args.command == 'query_data_with_bytes': + elif args.command == "query_data_with_bytes": query_data_with_bytes(args.instance_id, args.database_id) - elif args.command == 'query_data_with_date': + elif args.command == "query_data_with_date": query_data_with_date(args.instance_id, args.database_id) - elif args.command == 'query_data_with_float': + elif args.command == "query_data_with_float": query_data_with_float(args.instance_id, args.database_id) - elif args.command == 'query_data_with_int': + elif args.command == "query_data_with_int": query_data_with_int(args.instance_id, args.database_id) - elif args.command == 'query_data_with_string': + elif args.command == "query_data_with_string": query_data_with_string(args.instance_id, args.database_id) - elif args.command == 'query_data_with_timestamp_parameter': + elif args.command == "query_data_with_timestamp_parameter": query_data_with_timestamp_parameter(args.instance_id, args.database_id) - elif args.command == 'query_data_with_query_options': + elif args.command == "query_data_with_query_options": query_data_with_query_options(args.instance_id, args.database_id) - elif args.command == 'create_client_with_query_options': + elif args.command == "create_client_with_query_options": create_client_with_query_options(args.instance_id, args.database_id) diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py index 0ec3ef42b7..a62a3d90aa 100644 --- a/samples/samples/snippets_test.py +++ b/samples/samples/snippets_test.py @@ -23,19 +23,19 @@ def unique_instance_id(): """ Creates a unique id for the database. """ - return f'test-instance-{uuid.uuid4().hex[:10]}' + return f"test-instance-{uuid.uuid4().hex[:10]}" def unique_database_id(): """ Creates a unique id for the database. 
""" - return f'test-db-{uuid.uuid4().hex[:10]}' + return f"test-db-{uuid.uuid4().hex[:10]}" INSTANCE_ID = unique_instance_id() DATABASE_ID = unique_database_id() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def spanner_instance(): snippets.create_instance(INSTANCE_ID) spanner_client = spanner.Client() @@ -44,7 +44,7 @@ def spanner_instance(): instance.delete() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def database(spanner_instance): """ Creates a temporary database that is removed after testing. """ snippets.create_database(INSTANCE_ID, DATABASE_ID) @@ -66,32 +66,32 @@ def test_create_database(database): def test_insert_data(capsys): snippets.insert_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Inserted data' in out + assert "Inserted data" in out def test_delete_data(capsys): snippets.delete_data(INSTANCE_ID, DATABASE_ID) snippets.insert_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Deleted data' in out + assert "Deleted data" in out def test_query_data(capsys): snippets.query_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk' in out + assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out def test_add_column(capsys): snippets.add_column(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Added the MarketingBudget column.' in out + assert "Added the MarketingBudget column." in out def test_read_data(capsys): snippets.read_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk' in out + assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out def test_update_data(capsys): @@ -101,7 +101,7 @@ def test_update_data(capsys): snippets.update_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Updated data.' in out + assert "Updated data." 
in out def test_read_stale_data(capsys): @@ -109,61 +109,61 @@ def test_read_stale_data(capsys): # at least 15 seconds after the previous insert snippets.read_stale_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 1, AlbumId: 1, MarketingBudget: None' in out + assert "SingerId: 1, AlbumId: 1, MarketingBudget: None" in out def test_read_write_transaction(capsys): snippets.read_write_transaction(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Transaction complete' in out + assert "Transaction complete" in out def test_query_data_with_new_column(capsys): snippets.query_data_with_new_column(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 1, AlbumId: 1, MarketingBudget: 300000' in out - assert 'SingerId: 2, AlbumId: 2, MarketingBudget: 300000' in out + assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out + assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out def test_add_index(capsys): snippets.add_index(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Added the AlbumsByAlbumTitle index' in out + assert "Added the AlbumsByAlbumTitle index" in out def test_query_data_with_index(capsys): snippets.query_data_with_index(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Go, Go, Go' in out - assert 'Forever Hold Your Peace' in out - assert 'Green' not in out + assert "Go, Go, Go" in out + assert "Forever Hold Your Peace" in out + assert "Green" not in out def test_read_data_with_index(capsys): snippets.read_data_with_index(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Go, Go, Go' in out - assert 'Forever Hold Your Peace' in out - assert 'Green' in out + assert "Go, Go, Go" in out + assert "Forever Hold Your Peace" in out + assert "Green" in out def test_add_storing_index(capsys): snippets.add_storing_index(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Added the AlbumsByAlbumTitle2 index.' in out + assert "Added the AlbumsByAlbumTitle2 index." 
in out def test_read_data_with_storing_index(capsys): snippets.read_data_with_storing_index(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '300000' in out + assert "300000" in out def test_read_only_transaction(capsys): snippets.read_only_transaction(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() # Snippet does two reads, so entry should be listed twice - assert out.count('SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk') == 2 + assert out.count("SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk") == 2 def test_add_timestamp_column(capsys): @@ -175,108 +175,108 @@ def test_add_timestamp_column(capsys): def test_update_data_with_timestamp(capsys): snippets.update_data_with_timestamp(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Updated data' in out + assert "Updated data" in out def test_query_data_with_timestamp(capsys): snippets.query_data_with_timestamp(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 1, AlbumId: 1, MarketingBudget: 1000000' in out - assert 'SingerId: 2, AlbumId: 2, MarketingBudget: 750000' in out + assert "SingerId: 1, AlbumId: 1, MarketingBudget: 1000000" in out + assert "SingerId: 2, AlbumId: 2, MarketingBudget: 750000" in out def test_create_table_with_timestamp(capsys): snippets.create_table_with_timestamp(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Created Performances table on database' in out + assert "Created Performances table on database" in out def test_insert_data_with_timestamp(capsys): snippets.insert_data_with_timestamp(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Inserted data.' in out + assert "Inserted data." in out def test_write_struct_data(capsys): snippets.write_struct_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Inserted sample data for STRUCT queries' in out + assert "Inserted sample data for STRUCT queries" in out def test_query_with_struct(capsys): snippets.query_with_struct(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 6' in out + assert "SingerId: 6" in out def test_query_with_array_of_struct(capsys): snippets.query_with_array_of_struct(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 8' in out - assert 'SingerId: 7' in out - assert 'SingerId: 6' in out + assert "SingerId: 8" in out + assert "SingerId: 7" in out + assert "SingerId: 6" in out def test_query_struct_field(capsys): snippets.query_struct_field(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 6' in out + assert "SingerId: 6" in out def test_query_nested_struct_field(capsys): snippets.query_nested_struct_field(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 6 SongName: Imagination' in out - assert 'SingerId: 9 SongName: Imagination' in out + assert "SingerId: 6 SongName: Imagination" in out + assert "SingerId: 9 SongName: Imagination" in out def test_insert_data_with_dml(capsys): snippets.insert_data_with_dml(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '1 record(s) inserted.' in out + assert "1 record(s) inserted." in out def test_update_data_with_dml(capsys): snippets.update_data_with_dml(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '1 record(s) updated.' in out + assert "1 record(s) updated." in out def test_delete_data_with_dml(capsys): snippets.delete_data_with_dml(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '1 record(s) deleted.' in out + assert "1 record(s) deleted." 
in out def test_update_data_with_dml_timestamp(capsys): snippets.update_data_with_dml_timestamp(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '2 record(s) updated.' in out + assert "2 record(s) updated." in out def test_dml_write_read_transaction(capsys): snippets.dml_write_read_transaction(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '1 record(s) inserted.' in out - assert 'FirstName: Timothy, LastName: Campbell' in out + assert "1 record(s) inserted." in out + assert "FirstName: Timothy, LastName: Campbell" in out def test_update_data_with_dml_struct(capsys): snippets.update_data_with_dml_struct(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '1 record(s) updated' in out + assert "1 record(s) updated" in out def test_insert_with_dml(capsys): snippets.insert_with_dml(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert '4 record(s) inserted' in out + assert "4 record(s) inserted" in out def test_query_data_with_parameter(capsys): snippets.query_data_with_parameter(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'SingerId: 12, FirstName: Melissa, LastName: Garcia' in out + assert "SingerId: 12, FirstName: Melissa, LastName: Garcia" in out def test_write_with_dml_transaction(capsys): @@ -306,60 +306,59 @@ def update_with_batch_dml(capsys): def test_create_table_with_datatypes(capsys): snippets.create_table_with_datatypes(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Created Venues table on database' in out + assert "Created Venues table on database" in out def test_insert_datatypes_data(capsys): snippets.insert_datatypes_data(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'Inserted data.' in out + assert "Inserted data." in out def test_query_data_with_array(capsys): snippets.query_data_with_array(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01' in out - assert 'VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01' in out + assert "VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01" in out + assert "VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01" in out def test_query_data_with_bool(capsys): snippets.query_data_with_bool(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 19, VenueName: Venue 19, OutdoorVenue: True' in out + assert "VenueId: 19, VenueName: Venue 19, OutdoorVenue: True" in out def test_query_data_with_bytes(capsys): snippets.query_data_with_bytes(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 4, VenueName: Venue 4' in out + assert "VenueId: 4, VenueName: Venue 4" in out def test_query_data_with_date(capsys): snippets.query_data_with_date(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02' in out - assert 'VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01' \ - in out + assert "VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02" in out + assert "VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01" in out def test_query_data_with_float(capsys): snippets.query_data_with_float(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8' in out - assert 'VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9' in out + assert "VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8" in out + assert "VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9" in 
out def test_query_data_with_int(capsys): snippets.query_data_with_int(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 19, VenueName: Venue 19, Capacity: 6300' in out - assert 'VenueId: 42, VenueName: Venue 42, Capacity: 3000' in out + assert "VenueId: 19, VenueName: Venue 19, Capacity: 6300" in out + assert "VenueId: 42, VenueName: Venue 42, Capacity: 3000" in out def test_query_data_with_string(capsys): snippets.query_data_with_string(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 42, VenueName: Venue 42' in out + assert "VenueId: 42, VenueName: Venue 42" in out def test_query_data_with_timestamp_parameter(capsys): @@ -368,22 +367,22 @@ def test_query_data_with_timestamp_parameter(capsys): time.sleep(5) snippets.query_data_with_timestamp_parameter(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 4, VenueName: Venue 4, LastUpdateTime:' in out - assert 'VenueId: 19, VenueName: Venue 19, LastUpdateTime:' in out - assert 'VenueId: 42, VenueName: Venue 42, LastUpdateTime:' in out + assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out + assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out + assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out def test_query_data_with_query_options(capsys): snippets.query_data_with_query_options(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 4, VenueName: Venue 4, LastUpdateTime:' in out - assert 'VenueId: 19, VenueName: Venue 19, LastUpdateTime:' in out - assert 'VenueId: 42, VenueName: Venue 42, LastUpdateTime:' in out + assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out + assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out + assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out def test_create_client_with_query_options(capsys): snippets.create_client_with_query_options(INSTANCE_ID, DATABASE_ID) out, _ = capsys.readouterr() - assert 'VenueId: 4, VenueName: Venue 4, LastUpdateTime:' in out - assert 'VenueId: 19, VenueName: Venue 19, LastUpdateTime:' in out - assert 'VenueId: 42, VenueName: Venue 42, LastUpdateTime:' in out + assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out + assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out + assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out diff --git a/synth.metadata b/synth.metadata index 3618f8cff9..df4ded371b 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,52 +3,22 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-spanner.git", - "sha": "edfefc8aa2e74e0366b0f9208896c5637f1a0b11" + "remote": "git@github.com:larkee/python-spanner.git", + "sha": "3e54af6f8582e9620afb704e1d08994eab12c365" } }, { "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "3474dc892349674efda09d74b3a574765d996188", - "internalRef": "321098618" + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "2686d7075fa456972bf4d08680d99617f5eb32b1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4f2c9f752a94042472fc03c5bd9e06e89817d2bd" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "spanner", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "spanner_admin_instance", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - 
} - }, - { - "client": { - "source": "googleapis", - "apiName": "spanner_admin_database", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" + "sha": "2686d7075fa456972bf4d08680d99617f5eb32b1" } } ] diff --git a/synth.py b/synth.py index 7f9540f72b..bf0c2f1b63 100644 --- a/synth.py +++ b/synth.py @@ -15,6 +15,7 @@ """This script is used to synthesize generated parts of this library.""" import synthtool as s from synthtool import gcp +from synthtool.languages import python gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() @@ -151,7 +152,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=99) +templated_files = common.py_library(unit_cov_level=97, cov_level=99, samples=True) s.move(templated_files, excludes=["noxfile.py"]) # Template's MANIFEST.in does not include the needed GAPIC config file. @@ -171,4 +172,10 @@ "\n\g<0>", ) +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- + +python.py_samples() + s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 3585a1fa90a063eefab01707bfa675755c525385 Mon Sep 17 00:00:00 2001 From: larkee Date: Mon, 27 Jul 2020 15:35:55 +1000 Subject: [PATCH 74/74] fix lint --- samples/samples/batch_sample.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samples/samples/batch_sample.py b/samples/samples/batch_sample.py index 59f3c081ac..553dc31517 100644 --- a/samples/samples/batch_sample.py +++ b/samples/samples/batch_sample.py @@ -71,6 +71,8 @@ def process(snapshot, partition): print(u"SingerId: {}, AlbumId: {}, AlbumTitle: {}".format(*row)) row_ct += 1 return time.time(), row_ct + + # [END spanner_batch_client]
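For context, the process helper touched by this final hunk is the per-partition worker of a Cloud Spanner batch read. A minimal driver sketch follows; the BatchSnapshot calls (batch_snapshot, generate_read_batches, process_read_batch) are google-cloud-spanner client APIs, while run_batch_query, the Singers table, and the column list are assumptions for illustration, not part of the patch:

    import concurrent.futures
    import time

    from google.cloud import spanner


    def process(snapshot, partition):
        # Stream the rows of a single partition and count them (mirrors the
        # process helper shown in the batch_sample.py hunk).
        row_ct = 0
        for _ in snapshot.process_read_batch(partition):
            row_ct += 1
        return time.time(), row_ct


    def run_batch_query(instance_id, database_id):
        # Split one read into independent partitions, then fan the partitions
        # out to a thread pool of process() workers.
        database = spanner.Client().instance(instance_id).database(database_id)
        snapshot = database.batch_snapshot()
        partitions = snapshot.generate_read_batches(
            table="Singers",
            columns=("SingerId", "FirstName", "LastName"),
            keyset=spanner.KeySet(all_=True),
        )

        start = time.time()
        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [executor.submit(process, snapshot, p) for p in partitions]
            for future in concurrent.futures.as_completed(futures):
                finish, row_ct = future.result()
                print("Completed {} rows in {:.2f} seconds".format(row_ct, finish - start))

        # Release the underlying session once all partitions are done.
        snapshot.close()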