diff --git a/bigquery/samples/add_empty_column.py b/bigquery/samples/add_empty_column.py
index bd531898eb29..cd7cf5018e1f 100644
--- a/bigquery/samples/add_empty_column.py
+++ b/bigquery/samples/add_empty_column.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def add_empty_column(client, table_id):
+def add_empty_column(table_id):
 
     # [START bigquery_add_empty_column]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table
     # to add an empty column.
diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py
index 78d1d351a7a7..29a1c2ff61e0 100644
--- a/bigquery/samples/browse_table_data.py
+++ b/bigquery/samples/browse_table_data.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def browse_table_data(client, table_id):
+def browse_table_data(table_id):
 
     # [START bigquery_browse_table]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table to browse data rows.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/client_list_jobs.py b/bigquery/samples/client_list_jobs.py
index 08eb4fbd99ef..b2344e23c7f7 100644
--- a/bigquery/samples/client_list_jobs.py
+++ b/bigquery/samples/client_list_jobs.py
@@ -13,16 +13,16 @@
 # limitations under the License.
 
 
-def client_list_jobs(client):
+def client_list_jobs():
 
     # [START bigquery_list_jobs]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
+
+    from google.cloud import bigquery
 
     import datetime
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # List the 10 most recent jobs in reverse chronological order.
     # Omit the max_results parameter to list jobs from the past 6 months.
diff --git a/bigquery/samples/client_load_partitioned_table.py b/bigquery/samples/client_load_partitioned_table.py
index 3f9f86db9a9a..e4e8a296c9a3 100644
--- a/bigquery/samples/client_load_partitioned_table.py
+++ b/bigquery/samples/client_load_partitioned_table.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_load_partitioned_table(client, table_id):
+def client_load_partitioned_table(table_id):
 
     # [START bigquery_load_table_partitioned]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table to create.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/client_query.py b/bigquery/samples/client_query.py
index 5242c854e220..7fedc3f90b1e 100644
--- a/bigquery/samples/client_query.py
+++ b/bigquery/samples/client_query.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def client_query(client):
+def client_query():
 
     # [START bigquery_query]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query = """
         SELECT name, SUM(number) as total_people
diff --git a/bigquery/samples/client_query_add_column.py b/bigquery/samples/client_query_add_column.py
index c35548d2a361..ff7d5aa68add 100644
--- a/bigquery/samples/client_query_add_column.py
+++ b/bigquery/samples/client_query_add_column.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_add_column(client, table_id):
+def client_query_add_column(table_id):
 
     # [START bigquery_add_column_query_append]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the destination table.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py
index af9fcd8a1e40..e1680f4a18d2 100644
--- a/bigquery/samples/client_query_batch.py
+++ b/bigquery/samples/client_query_batch.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_batch(client):
+def client_query_batch():
 
     # [START bigquery_query_batch]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     job_config = bigquery.QueryJobConfig(
         # Run at batch priority, which won't count toward concurrent rate limit.
diff --git a/bigquery/samples/client_query_destination_table.py b/bigquery/samples/client_query_destination_table.py
index 876df7904d9c..303ce5a0cc36 100644
--- a/bigquery/samples/client_query_destination_table.py
+++ b/bigquery/samples/client_query_destination_table.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_destination_table(client, table_id):
+def client_query_destination_table(table_id):
 
     # [START bigquery_query_destination_table]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the destination table.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py
index d3409eecd77d..24d4f22228c7 100644
--- a/bigquery/samples/client_query_destination_table_cmek.py
+++ b/bigquery/samples/client_query_destination_table_cmek.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_destination_table_cmek(client, table_id, kms_key_name):
+def client_query_destination_table_cmek(table_id, kms_key_name):
 
     # [START bigquery_query_destination_table_cmek]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the destination table.
# table_id = "your-project.your_dataset.your_table_name" diff --git a/bigquery/samples/client_query_destination_table_legacy.py b/bigquery/samples/client_query_destination_table_legacy.py index 8e977a92d996..c8fdd606f9f4 100644 --- a/bigquery/samples/client_query_destination_table_legacy.py +++ b/bigquery/samples/client_query_destination_table_legacy.py @@ -13,13 +13,13 @@ # limitations under the License. -def client_query_destination_table_legacy(client, table_id): +def client_query_destination_table_legacy(table_id): # [START bigquery_query_legacy_large_results] from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + # Construct a BigQuery client object. + client = bigquery.Client() # TODO(developer): Set table_id to the ID of the destination table. # table_id = "your-project.your_dataset.your_table_name" diff --git a/bigquery/samples/client_query_dry_run.py b/bigquery/samples/client_query_dry_run.py index 2d09a1c25f4a..1f7bd0c9c4e7 100644 --- a/bigquery/samples/client_query_dry_run.py +++ b/bigquery/samples/client_query_dry_run.py @@ -13,13 +13,13 @@ # limitations under the License. -def client_query_dry_run(client): +def client_query_dry_run(): # [START bigquery_query_dry_run] from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + # Construct a BigQuery client object. + client = bigquery.Client() job_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False) diff --git a/bigquery/samples/client_query_legacy_sql.py b/bigquery/samples/client_query_legacy_sql.py index c8dae20649e2..3f94657795fb 100644 --- a/bigquery/samples/client_query_legacy_sql.py +++ b/bigquery/samples/client_query_legacy_sql.py @@ -13,13 +13,13 @@ # limitations under the License. -def client_query_legacy_sql(client): +def client_query_legacy_sql(): # [START bigquery_query_legacy] from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + # Construct a BigQuery client object. + client = bigquery.Client() query = ( "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] " diff --git a/bigquery/samples/client_query_relax_column.py b/bigquery/samples/client_query_relax_column.py index 8ec117e186fc..5e2ec8056a00 100644 --- a/bigquery/samples/client_query_relax_column.py +++ b/bigquery/samples/client_query_relax_column.py @@ -13,13 +13,13 @@ # limitations under the License. -def client_query_relax_column(client, table_id): +def client_query_relax_column(table_id): # [START bigquery_relax_column_query_append] from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + # Construct a BigQuery client object. + client = bigquery.Client() # TODO(developer): Set table_id to the ID of the destination table. # table_id = "your-project.your_dataset.your_table_name" diff --git a/bigquery/samples/client_query_w_array_params.py b/bigquery/samples/client_query_w_array_params.py index 254173d4c540..4077be2c7d3e 100644 --- a/bigquery/samples/client_query_w_array_params.py +++ b/bigquery/samples/client_query_w_array_params.py @@ -13,13 +13,13 @@ # limitations under the License. -def client_query_w_array_params(client): +def client_query_w_array_params(): # [START bigquery_query_params_arrays] from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + # Construct a BigQuery client object. 
+    client = bigquery.Client()
 
     query = """
         SELECT name, sum(number) as count
diff --git a/bigquery/samples/client_query_w_named_params.py b/bigquery/samples/client_query_w_named_params.py
index eba5bc221ff9..a0de8f63aa99 100644
--- a/bigquery/samples/client_query_w_named_params.py
+++ b/bigquery/samples/client_query_w_named_params.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_w_named_params(client):
+def client_query_w_named_params():
 
     # [START bigquery_query_params_named]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query = """
         SELECT word, word_count
diff --git a/bigquery/samples/client_query_w_positional_params.py b/bigquery/samples/client_query_w_positional_params.py
index 3f7ce584bcf9..ee316044bda3 100644
--- a/bigquery/samples/client_query_w_positional_params.py
+++ b/bigquery/samples/client_query_w_positional_params.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_w_positional_params(client):
+def client_query_w_positional_params():
 
     # [START bigquery_query_params_positional]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query = """
         SELECT word, word_count
diff --git a/bigquery/samples/client_query_w_struct_params.py b/bigquery/samples/client_query_w_struct_params.py
index 7c291447f0cb..041a3a0e3839 100644
--- a/bigquery/samples/client_query_w_struct_params.py
+++ b/bigquery/samples/client_query_w_struct_params.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def client_query_w_struct_params(client):
+def client_query_w_struct_params():
 
     # [START bigquery_query_params_structs]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query = "SELECT @struct_value AS s;"
     job_config = bigquery.QueryJobConfig(
diff --git a/bigquery/samples/client_query_w_timestamp_params.py b/bigquery/samples/client_query_w_timestamp_params.py
index cc334f7e9625..ca8eec0b5e89 100644
--- a/bigquery/samples/client_query_w_timestamp_params.py
+++ b/bigquery/samples/client_query_w_timestamp_params.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 
-def client_query_w_timestamp_params(client):
+def client_query_w_timestamp_params():
 
     # [START bigquery_query_params_timestamps]
     import datetime
@@ -21,8 +21,8 @@ def client_query_w_timestamp_params(client):
     import pytz
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);"
     job_config = bigquery.QueryJobConfig(
diff --git a/bigquery/samples/copy_table.py b/bigquery/samples/copy_table.py
index 20f6776cf87d..91c58e109cb9 100644
--- a/bigquery/samples/copy_table.py
+++ b/bigquery/samples/copy_table.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def copy_table(client, source_table_id, destination_table_id):
+def copy_table(source_table_id, destination_table_id):
 
     # [START bigquery_copy_table]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set source_table_id to the ID of the original table.
     # source_table_id = "your-project.source_dataset.source_table"
diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py
index 1e9ee198c821..52ccb5f7b1df 100644
--- a/bigquery/samples/copy_table_cmek.py
+++ b/bigquery/samples/copy_table_cmek.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def copy_table_cmek(client, dest_table_id, orig_table_id, kms_key_name):
+def copy_table_cmek(dest_table_id, orig_table_id, kms_key_name):
 
     # [START bigquery_copy_table_cmek]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dest_table_id to the ID of the destination table.
     # dest_table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/copy_table_multiple_source.py b/bigquery/samples/copy_table_multiple_source.py
index 532ea0a0ab90..d86e380d0682 100644
--- a/bigquery/samples/copy_table_multiple_source.py
+++ b/bigquery/samples/copy_table_multiple_source.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def copy_table_multiple_source(client, dest_table_id, table_ids):
+def copy_table_multiple_source(dest_table_id, table_ids):
 
     # [START bigquery_copy_table_multiple_source]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dest_table_id to the ID of the destination table.
     # dest_table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py
index 3d64473a2321..e47d68a96b2a 100644
--- a/bigquery/samples/create_dataset.py
+++ b/bigquery/samples/create_dataset.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def create_dataset(client, dataset_id):
+def create_dataset(dataset_id):
 
     # [START bigquery_create_dataset]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset to create.
     # dataset_id = "{}.your_dataset".format(client.project)
diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py
index 4f7f27a8e668..feed04ca00e8 100644
--- a/bigquery/samples/create_job.py
+++ b/bigquery/samples/create_job.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def create_job(client):
+def create_job():
 
     # [START bigquery_create_job]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query_job = client.query(
         "SELECT country_name from `bigquery-public-data.utility_us.country_code_iso`",
diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py
index 424ee4ef5553..d9b221a4f62b 100644
--- a/bigquery/samples/create_routine.py
+++ b/bigquery/samples/create_routine.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def create_routine(client, routine_id):
+def create_routine(routine_id):
 
     # [START bigquery_create_routine]
     from google.cloud import bigquery
     from google.cloud import bigquery_v2
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Choose a fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py
index eb5af0388503..c191bd385041 100644
--- a/bigquery/samples/create_routine_ddl.py
+++ b/bigquery/samples/create_routine_ddl.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def create_routine_ddl(client, routine_id):
+def create_routine_ddl(routine_id):
 
     # [START bigquery_create_routine_ddl]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Choose a fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py
index ae26c57fed00..d62e86681afc 100644
--- a/bigquery/samples/create_table.py
+++ b/bigquery/samples/create_table.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def create_table(client, table_id):
+def create_table(table_id):
 
     # [START bigquery_create_table]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table to create.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/create_table_range_partitioned.py b/bigquery/samples/create_table_range_partitioned.py
index f9da09cff847..260041aa5d5d 100644
--- a/bigquery/samples/create_table_range_partitioned.py
+++ b/bigquery/samples/create_table_range_partitioned.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def create_table_range_partitioned(client, table_id):
+def create_table_range_partitioned(table_id):
 
     # [START bigquery_create_table_range_partitioned]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table to create.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py
index b8b53b8a4580..b4db9353b37f 100644
--- a/bigquery/samples/dataset_exists.py
+++ b/bigquery/samples/dataset_exists.py
@@ -13,11 +13,14 @@
 # limitations under the License.
 
 
-def dataset_exists(client, dataset_id):
+def dataset_exists(dataset_id):
 
     # [START bigquery_dataset_exists]
+    from google.cloud import bigquery
     from google.cloud.exceptions import NotFound
 
+    client = bigquery.Client()
+
     # TODO(developer): Set dataset_id to the ID of the dataset to determine existence.
# dataset_id = "your-project.your_dataset" diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py index 8ce95d953392..e25740baaff0 100644 --- a/bigquery/samples/delete_dataset.py +++ b/bigquery/samples/delete_dataset.py @@ -13,14 +13,14 @@ # limitations under the License. -def delete_dataset(client, dataset_id): +def delete_dataset(dataset_id): # [START bigquery_delete_dataset] - # TODO(developer): Import the client library. - # from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + from google.cloud import bigquery + + # Construct a BigQuery client object. + client = bigquery.Client() # TODO(developer): Set model_id to the ID of the model to fetch. # dataset_id = 'your-project.your_dataset' diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py index 9e6493694ddc..a52de2967e70 100644 --- a/bigquery/samples/delete_dataset_labels.py +++ b/bigquery/samples/delete_dataset_labels.py @@ -13,14 +13,14 @@ # limitations under the License. -def delete_dataset_labels(client, dataset_id): +def delete_dataset_labels(dataset_id): # [START bigquery_delete_label_dataset] - # TODO(developer): Import the client library. - # from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + from google.cloud import bigquery + + # Construct a BigQuery client object. + client = bigquery.Client() # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py index b6f32a59ebd9..0190315c6bed 100644 --- a/bigquery/samples/delete_model.py +++ b/bigquery/samples/delete_model.py @@ -13,15 +13,15 @@ # limitations under the License. -def delete_model(client, model_id): +def delete_model(model_id): """Sample ID: go/samples-tracker/1534""" # [START bigquery_delete_model] - # TODO(developer): Import the client library. - # from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + from google.cloud import bigquery + + # Construct a BigQuery client object. + client = bigquery.Client() # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py index c20b49837b75..679cbee4bc94 100644 --- a/bigquery/samples/delete_routine.py +++ b/bigquery/samples/delete_routine.py @@ -13,14 +13,14 @@ # limitations under the License. -def delete_routine(client, routine_id): +def delete_routine(routine_id): # [START bigquery_delete_routine] - # TODO(developer): Import the client library. - # from google.cloud import bigquery - # TODO(developer): Construct a BigQuery client object. - # client = bigquery.Client() + from google.cloud import bigquery + + # Construct a BigQuery client object. + client = bigquery.Client() # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py index b83a92890b09..3d0a6f0babc2 100644 --- a/bigquery/samples/delete_table.py +++ b/bigquery/samples/delete_table.py @@ -13,14 +13,14 @@ # limitations under the License. 
 
 
-def delete_table(client, table_id):
+def delete_table(table_id):
 
     # [START bigquery_delete_table]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table to fetch.
     # table_id = 'your-project.your_dataset.your_table'
diff --git a/bigquery/samples/download_public_data.py b/bigquery/samples/download_public_data.py
index 815d140fc6f1..d10ed161a5da 100644
--- a/bigquery/samples/download_public_data.py
+++ b/bigquery/samples/download_public_data.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def download_public_data(client):
+def download_public_data():
 
     # [START bigquery_pandas_public_data]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the fully-qualified table ID in standard
     # SQL format, including the project ID and dataset ID.
diff --git a/bigquery/samples/download_public_data_sandbox.py b/bigquery/samples/download_public_data_sandbox.py
index edb1466e4bd7..afb50b15c3a9 100644
--- a/bigquery/samples/download_public_data_sandbox.py
+++ b/bigquery/samples/download_public_data_sandbox.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def download_public_data_sandbox(client):
+def download_public_data_sandbox():
 
     # [START bigquery_pandas_public_data_sandbox]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # `SELECT *` is an anti-pattern in BigQuery because it is cheaper and
     # faster to use the BigQuery Storage API directly, but BigQuery Sandbox
diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py
index bb3d4a0d4c40..54ba05781dd6 100644
--- a/bigquery/samples/get_dataset.py
+++ b/bigquery/samples/get_dataset.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def get_dataset(client, dataset_id):
+def get_dataset(dataset_id):
 
     # [START bigquery_get_dataset]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py
index 411607f84664..18a9ca985f51 100644
--- a/bigquery/samples/get_dataset_labels.py
+++ b/bigquery/samples/get_dataset_labels.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def get_dataset_labels(client, dataset_id):
+def get_dataset_labels(dataset_id):
 
     # [START bigquery_get_dataset_labels]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = "your-project.your_dataset"
diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py
index 0ebd59c9d067..1570ef816895 100644
--- a/bigquery/samples/get_model.py
+++ b/bigquery/samples/get_model.py
@@ -13,15 +13,15 @@
 # limitations under the License.
 
 
-def get_model(client, model_id):
+def get_model(model_id):
     """Sample ID: go/samples-tracker/1510"""
 
     # [START bigquery_get_model]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py
index da4e89f57f19..72715ee1bcd7 100644
--- a/bigquery/samples/get_routine.py
+++ b/bigquery/samples/get_routine.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def get_routine(client, routine_id):
+def get_routine(routine_id):
 
     # [START bigquery_get_routine]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set the fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py
index 201b8808a846..0d1d809ba791 100644
--- a/bigquery/samples/get_table.py
+++ b/bigquery/samples/get_table.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def get_table(client, table_id):
+def get_table(table_id):
 
     # [START bigquery_get_table]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the model to fetch.
     # table_id = 'your-project.your_dataset.your_table'
diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py
index 019b2aa374a0..bd4cd6721a57 100644
--- a/bigquery/samples/label_dataset.py
+++ b/bigquery/samples/label_dataset.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def label_dataset(client, dataset_id):
+def label_dataset(dataset_id):
 
     # [START bigquery_label_dataset]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = "your-project.your_dataset"
diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py
index 77ae8c785d22..6a1b93d00bb6 100644
--- a/bigquery/samples/list_datasets.py
+++ b/bigquery/samples/list_datasets.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def list_datasets(client):
+def list_datasets():
 
     # [START bigquery_list_datasets]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     datasets = list(client.list_datasets())  # Make an API request.
     project = client.project
diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py
index 9fa939ad0c19..1b310049b4b3 100644
--- a/bigquery/samples/list_datasets_by_label.py
+++ b/bigquery/samples/list_datasets_by_label.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def list_datasets_by_label(client):
+def list_datasets_by_label():
 
     # [START bigquery_list_datasets_by_label]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     label_filter = "labels.color:green"
     datasets = list(client.list_datasets(filter=label_filter))  # Make an API request.
diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py
index a2477ffc795b..7251c001a770 100644
--- a/bigquery/samples/list_models.py
+++ b/bigquery/samples/list_models.py
@@ -13,15 +13,15 @@
 # limitations under the License.
 
 
-def list_models(client, dataset_id):
+def list_models(dataset_id):
     """Sample ID: go/samples-tracker/1512"""
 
     # [START bigquery_list_models]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset that contains
     # the models you are listing.
diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py
index 5eaad0cec8f4..718d40d680aa 100644
--- a/bigquery/samples/list_routines.py
+++ b/bigquery/samples/list_routines.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def list_routines(client, dataset_id):
+def list_routines(dataset_id):
 
     # [START bigquery_list_routines]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset that contains
     # the routines you are listing.
diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py
index d7576616e191..9ab527a4915f 100644
--- a/bigquery/samples/list_tables.py
+++ b/bigquery/samples/list_tables.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def list_tables(client, dataset_id):
+def list_tables(dataset_id):
 
     # [START bigquery_list_tables]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set dataset_id to the ID of the dataset that contains
     # the tables you are listing.
diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py
index 8cfb34424457..feaa4550bac9 100644
--- a/bigquery/samples/load_table_dataframe.py
+++ b/bigquery/samples/load_table_dataframe.py
@@ -13,15 +13,15 @@
 # limitations under the License.
 
 
-def load_table_dataframe(client, table_id):
+def load_table_dataframe(table_id):
 
     # [START bigquery_load_table_dataframe]
     from google.cloud import bigquery
 
     import pandas
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the table to create.
     # table_id = "your-project.your_dataset.your_table_name"
diff --git a/bigquery/samples/query_external_gcs_temporary_table.py b/bigquery/samples/query_external_gcs_temporary_table.py
index 3ef44bd32db1..3c3caf695870 100644
--- a/bigquery/samples/query_external_gcs_temporary_table.py
+++ b/bigquery/samples/query_external_gcs_temporary_table.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def query_external_gcs_temporary_table(client):
+def query_external_gcs_temporary_table():
 
     # [START bigquery_query_external_gcs_temp]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # Configure the external data source and query job.
     external_config = bigquery.ExternalConfig("CSV")
diff --git a/bigquery/samples/query_external_sheets_permanent_table.py b/bigquery/samples/query_external_sheets_permanent_table.py
index e9bc908f5b15..915e9acc303a 100644
--- a/bigquery/samples/query_external_sheets_permanent_table.py
+++ b/bigquery/samples/query_external_sheets_permanent_table.py
@@ -28,7 +28,7 @@ def query_external_sheets_permanent_table(dataset_id):
         ]
     )
 
-    # TODO(developer): Construct a BigQuery client object.
+    # Construct a BigQuery client object.
     client = bigquery.Client(credentials=credentials, project=project)
 
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
diff --git a/bigquery/samples/query_external_sheets_temporary_table.py b/bigquery/samples/query_external_sheets_temporary_table.py
index 7b6bde864b09..1b70e9531f96 100644
--- a/bigquery/samples/query_external_sheets_temporary_table.py
+++ b/bigquery/samples/query_external_sheets_temporary_table.py
@@ -29,7 +29,7 @@ def query_external_sheets_temporary_table():
         ]
     )
 
-    # TODO(developer): Construct a BigQuery client object.
+    # Construct a BigQuery client object.
     client = bigquery.Client(credentials=credentials, project=project)
 
     # [END bigquery_auth_drive_scope]
diff --git a/bigquery/samples/query_no_cache.py b/bigquery/samples/query_no_cache.py
index 3d542a96b7be..e380f0b15de0 100644
--- a/bigquery/samples/query_no_cache.py
+++ b/bigquery/samples/query_no_cache.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def query_no_cache(client):
+def query_no_cache():
 
     # [START bigquery_query_no_cache]
     from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     job_config = bigquery.QueryJobConfig(use_query_cache=False)
     sql = """
diff --git a/bigquery/samples/query_pagination.py b/bigquery/samples/query_pagination.py
index b5d1999bfb3c..57a4212cf664 100644
--- a/bigquery/samples/query_pagination.py
+++ b/bigquery/samples/query_pagination.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def query_pagination(client):
+def query_pagination():
 
     # [START bigquery_query_pagination]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     query = """
         SELECT name, SUM(number) as total_people
diff --git a/bigquery/samples/query_script.py b/bigquery/samples/query_script.py
index 453b7c6f9435..9390d352dd40 100644
--- a/bigquery/samples/query_script.py
+++ b/bigquery/samples/query_script.py
@@ -13,13 +13,13 @@
 # limitations under the License.
 
 
-def query_script(client):
+def query_script():
 
     # [START bigquery_query_script]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # Run a SQL script.
     sql_script = """
diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py
index 4cc69d4e902a..4a57992d13c2 100644
--- a/bigquery/samples/query_to_arrow.py
+++ b/bigquery/samples/query_to_arrow.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def query_to_arrow(client):
+def query_to_arrow():
 
     # [START bigquery_query_to_arrow]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     sql = """
     WITH races AS (
diff --git a/bigquery/samples/table_exists.py b/bigquery/samples/table_exists.py
index a011e6e2915d..152d95534add 100644
--- a/bigquery/samples/table_exists.py
+++ b/bigquery/samples/table_exists.py
@@ -13,11 +13,14 @@
 # limitations under the License.
 
 
-def table_exists(client, table_id):
+def table_exists(table_id):
 
     # [START bigquery_table_exists]
+    from google.cloud import bigquery
     from google.cloud.exceptions import NotFound
 
+    client = bigquery.Client()
+
     # TODO(developer): Set table_id to the ID of the table to determine existence.
     # table_id = "your-project.your_dataset.your_table"
diff --git a/bigquery/samples/table_insert_rows.py b/bigquery/samples/table_insert_rows.py
index e2f949b635a6..130f9dbbddf2 100644
--- a/bigquery/samples/table_insert_rows.py
+++ b/bigquery/samples/table_insert_rows.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def table_insert_rows(client, table_id):
+def table_insert_rows(table_id):
 
     # [START bigquery_table_insert_rows]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the model to fetch.
     # table_id = "your-project.your_dataset.your_table"
diff --git a/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py
index 953e7e210312..2410ba1765fc 100644
--- a/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py
+++ b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py
@@ -13,14 +13,14 @@
 # limitations under the License.
 
 
-def table_insert_rows_explicit_none_insert_ids(client, table_id):
+def table_insert_rows_explicit_none_insert_ids(table_id):
 
     # [START bigquery_table_insert_rows_explicit_none_insert_ids]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
 
-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()
 
     # TODO(developer): Set table_id to the ID of the model to fetch.
     # table_id = "your-project.your_dataset.your_table"
diff --git a/bigquery/samples/tests/conftest.py b/bigquery/samples/tests/conftest.py
index 6d049e6c2312..d80085dd3425 100644
--- a/bigquery/samples/tests/conftest.py
+++ b/bigquery/samples/tests/conftest.py
@@ -15,19 +15,31 @@
 import datetime
 import uuid
 
+import google.auth
+import mock
 import pytest
 
 from google.cloud import bigquery
 from google.cloud import bigquery_v2
 
 
-@pytest.fixture(scope="module")
+@pytest.fixture(scope="session", autouse=True)
 def client():
-    return bigquery.Client()
+    credentials, project = google.auth.default(
+        scopes=[
+            "https://www.googleapis.com/auth/drive",
+            "https://www.googleapis.com/auth/bigquery",
+        ]
+    )
+    real_client = bigquery.Client(credentials=credentials, project=project)
+    mock_client = mock.create_autospec(bigquery.Client)
+    mock_client.return_value = real_client
+    bigquery.Client = mock_client
+    return real_client
 
 
 @pytest.fixture
-def random_table_id(client, dataset_id):
+def random_table_id(dataset_id):
     now = datetime.datetime.now()
     random_table_id = "example_table_{}_{}".format(
         now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8]
@@ -46,7 +58,7 @@ def random_dataset_id(client):
 
 
 @pytest.fixture
-def random_routine_id(client, dataset_id):
+def random_routine_id(dataset_id):
     now = datetime.datetime.now()
     random_routine_id = "example_routine_{}_{}".format(
         now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8]
@@ -95,7 +107,7 @@ def table_with_schema_id(client, dataset_id):
 
 
 @pytest.fixture
-def table_with_data_id(client):
+def table_with_data_id():
     return "bigquery-public-data.samples.shakespeare"
 
 
diff --git a/bigquery/samples/tests/test_add_empty_column.py b/bigquery/samples/tests/test_add_empty_column.py
index de51bfed7672..d89fcb6b7022 100644
--- a/bigquery/samples/tests/test_add_empty_column.py
+++ b/bigquery/samples/tests/test_add_empty_column.py
@@ -15,8 +15,8 @@
 from .. import add_empty_column
 
 
-def test_add_empty_column(capsys, client, table_id):
+def test_add_empty_column(capsys, table_id):
 
-    add_empty_column.add_empty_column(client, table_id)
+    add_empty_column.add_empty_column(table_id)
     out, err = capsys.readouterr()
     assert "A new column has been added." in out
diff --git a/bigquery/samples/tests/test_browse_table_data.py b/bigquery/samples/tests/test_browse_table_data.py
index db9b867f5ab7..a5f647bdbda2 100644
--- a/bigquery/samples/tests/test_browse_table_data.py
+++ b/bigquery/samples/tests/test_browse_table_data.py
@@ -15,9 +15,9 @@
 from .. import browse_table_data
 
 
-def test_browse_table_data(capsys, client, table_with_data_id):
+def test_browse_table_data(capsys, table_with_data_id):
 
-    browse_table_data.browse_table_data(client, table_with_data_id)
+    browse_table_data.browse_table_data(table_with_data_id)
     out, err = capsys.readouterr()
     assert "Downloaded 164656 rows from table {}".format(table_with_data_id) in out
     assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out
diff --git a/bigquery/samples/tests/test_client_list_jobs.py b/bigquery/samples/tests/test_client_list_jobs.py
index ada053239802..896950a8253f 100644
--- a/bigquery/samples/tests/test_client_list_jobs.py
+++ b/bigquery/samples/tests/test_client_list_jobs.py
@@ -18,10 +18,10 @@
 def test_client_list_jobs(capsys, client):
 
-    job = create_job.create_job(client)
+    job = create_job.create_job()
     client.cancel_job(job.job_id)
     job.cancel()
 
-    client_list_jobs.client_list_jobs(client)
+    client_list_jobs.client_list_jobs()
     out, err = capsys.readouterr()
     assert "Started job: {}".format(job.job_id) in out
     assert "Last 10 jobs:" in out
diff --git a/bigquery/samples/tests/test_client_load_partitioned_table.py b/bigquery/samples/tests/test_client_load_partitioned_table.py
index 4e4c8811181f..f1d72a8587c6 100644
--- a/bigquery/samples/tests/test_client_load_partitioned_table.py
+++ b/bigquery/samples/tests/test_client_load_partitioned_table.py
@@ -15,8 +15,8 @@
 from .. import client_load_partitioned_table
 
 
-def test_client_load_partitioned_table(capsys, client, random_table_id):
+def test_client_load_partitioned_table(capsys, random_table_id):
 
-    client_load_partitioned_table.client_load_partitioned_table(client, random_table_id)
+    client_load_partitioned_table.client_load_partitioned_table(random_table_id)
     out, err = capsys.readouterr()
     assert "Loaded 50 rows to table {}".format(random_table_id) in out
diff --git a/bigquery/samples/tests/test_client_query.py b/bigquery/samples/tests/test_client_query.py
index e73e7e5a0eb4..810c46a17943 100644
--- a/bigquery/samples/tests/test_client_query.py
+++ b/bigquery/samples/tests/test_client_query.py
@@ -15,9 +15,9 @@
 from .. import client_query
 
 
-def test_client_query(capsys, client):
+def test_client_query(capsys,):
 
-    client_query.client_query(client)
+    client_query.client_query()
     out, err = capsys.readouterr()
     assert "The query data:" in out
     assert "name=James, count=272793" in out
diff --git a/bigquery/samples/tests/test_client_query_add_column.py b/bigquery/samples/tests/test_client_query_add_column.py
index 67ac328d5518..254533f78778 100644
--- a/bigquery/samples/tests/test_client_query_add_column.py
+++ b/bigquery/samples/tests/test_client_query_add_column.py
@@ -17,7 +17,7 @@
 from .. import client_query_add_column
 
 
-def test_client_query_add_column(capsys, client, random_table_id):
+def test_client_query_add_column(capsys, random_table_id, client):
 
     schema = [
         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
@@ -26,7 +26,7 @@ def test_client_query_add_column(capsys, client, random_table_id):
 
     client.create_table(bigquery.Table(random_table_id, schema=schema))
 
-    client_query_add_column.client_query_add_column(client, random_table_id)
+    client_query_add_column.client_query_add_column(random_table_id)
     out, err = capsys.readouterr()
     assert "Table {} contains 2 columns".format(random_table_id) in out
     assert "Table {} now contains 3 columns".format(random_table_id) in out
diff --git a/bigquery/samples/tests/test_client_query_batch.py b/bigquery/samples/tests/test_client_query_batch.py
index 79197e4565c7..c5e19985dda9 100644
--- a/bigquery/samples/tests/test_client_query_batch.py
+++ b/bigquery/samples/tests/test_client_query_batch.py
@@ -15,8 +15,8 @@
 from .. import client_query_batch
 
 
-def test_client_query_batch(capsys, client):
+def test_client_query_batch(capsys,):
 
-    job = client_query_batch.client_query_batch(client)
+    job = client_query_batch.client_query_batch()
     out, err = capsys.readouterr()
     assert "Job {} is currently in state {}".format(job.job_id, job.state) in out
diff --git a/bigquery/samples/tests/test_client_query_destination_table.py b/bigquery/samples/tests/test_client_query_destination_table.py
index d29aaebd3ce5..6bcdd498a215 100644
--- a/bigquery/samples/tests/test_client_query_destination_table.py
+++ b/bigquery/samples/tests/test_client_query_destination_table.py
@@ -15,8 +15,8 @@
 from .. import client_query_destination_table
 
 
-def test_client_query_destination_table(capsys, client, table_id):
+def test_client_query_destination_table(capsys, table_id):
 
-    client_query_destination_table.client_query_destination_table(client, table_id)
+    client_query_destination_table.client_query_destination_table(table_id)
     out, err = capsys.readouterr()
     assert "Query results loaded to the table {}".format(table_id) in out
diff --git a/bigquery/samples/tests/test_client_query_destination_table_cmek.py b/bigquery/samples/tests/test_client_query_destination_table_cmek.py
index cd4532be6d1d..4f9e3bc9a944 100644
--- a/bigquery/samples/tests/test_client_query_destination_table_cmek.py
+++ b/bigquery/samples/tests/test_client_query_destination_table_cmek.py
@@ -15,12 +15,10 @@
 from .. import client_query_destination_table_cmek
 
 
-def test_client_query_destination_table_cmek(
-    capsys, client, random_table_id, kms_key_name
-):
+def test_client_query_destination_table_cmek(capsys, random_table_id, kms_key_name):
 
     client_query_destination_table_cmek.client_query_destination_table_cmek(
-        client, random_table_id, kms_key_name
+        random_table_id, kms_key_name
     )
     out, err = capsys.readouterr()
     assert "The destination table is written using the encryption configuration" in out
diff --git a/bigquery/samples/tests/test_client_query_destination_table_legacy.py b/bigquery/samples/tests/test_client_query_destination_table_legacy.py
index da62baada213..46077497b1c7 100644
--- a/bigquery/samples/tests/test_client_query_destination_table_legacy.py
+++ b/bigquery/samples/tests/test_client_query_destination_table_legacy.py
@@ -15,10 +15,10 @@
 from .. import client_query_destination_table_legacy
 
 
-def test_client_query_destination_table_legacy(capsys, client, random_table_id):
+def test_client_query_destination_table_legacy(capsys, random_table_id):
 
     client_query_destination_table_legacy.client_query_destination_table_legacy(
-        client, random_table_id
+        random_table_id
     )
     out, err = capsys.readouterr()
     assert "Query results loaded to the table {}".format(random_table_id) in out
diff --git a/bigquery/samples/tests/test_client_query_dry_run.py b/bigquery/samples/tests/test_client_query_dry_run.py
index c39a22767d25..5cbf2e3fae6e 100644
--- a/bigquery/samples/tests/test_client_query_dry_run.py
+++ b/bigquery/samples/tests/test_client_query_dry_run.py
@@ -15,9 +15,9 @@
 from .. import client_query_dry_run
 
 
-def test_client_query_dry_run(capsys, client):
+def test_client_query_dry_run(capsys,):
 
-    query_job = client_query_dry_run.client_query_dry_run(client)
+    query_job = client_query_dry_run.client_query_dry_run()
     out, err = capsys.readouterr()
     assert "This query will process" in out
     assert query_job.state == "DONE"
diff --git a/bigquery/samples/tests/test_client_query_legacy_sql.py b/bigquery/samples/tests/test_client_query_legacy_sql.py
index fb6ee60bc6ec..ab240fad1a1e 100644
--- a/bigquery/samples/tests/test_client_query_legacy_sql.py
+++ b/bigquery/samples/tests/test_client_query_legacy_sql.py
@@ -17,8 +17,8 @@
 from .. import client_query_legacy_sql
 
 
-def test_client_query_legacy_sql(capsys, client):
+def test_client_query_legacy_sql(capsys,):
 
-    client_query_legacy_sql.client_query_legacy_sql(client)
+    client_query_legacy_sql.client_query_legacy_sql()
     out, err = capsys.readouterr()
     assert re.search(r"(Row[\w(){}:', ]+)$", out)
diff --git a/bigquery/samples/tests/test_client_query_relax_column.py b/bigquery/samples/tests/test_client_query_relax_column.py
index 685db9cb1fa0..0c5b7aa6f982 100644
--- a/bigquery/samples/tests/test_client_query_relax_column.py
+++ b/bigquery/samples/tests/test_client_query_relax_column.py
@@ -17,7 +17,7 @@
 from .. import client_query_relax_column
 
 
-def test_client_query_relax_column(capsys, client, random_table_id):
+def test_client_query_relax_column(capsys, random_table_id, client):
 
     schema = [
         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
@@ -26,7 +26,7 @@ def test_client_query_relax_column(capsys, client, random_table_id):
 
     client.create_table(bigquery.Table(random_table_id, schema=schema))
 
-    client_query_relax_column.client_query_relax_column(client, random_table_id)
+    client_query_relax_column.client_query_relax_column(random_table_id)
     out, err = capsys.readouterr()
     assert "2 fields in the schema are required." in out
     assert "0 fields in the schema are now required." in out
diff --git a/bigquery/samples/tests/test_client_query_w_array_params.py b/bigquery/samples/tests/test_client_query_w_array_params.py
index 8603e9b8fe3d..07e0294e93d1 100644
--- a/bigquery/samples/tests/test_client_query_w_array_params.py
+++ b/bigquery/samples/tests/test_client_query_w_array_params.py
@@ -15,8 +15,8 @@
 from .. import client_query_w_array_params
 
 
-def test_client_query_w_array_params(capsys, client):
+def test_client_query_w_array_params(capsys,):
 
-    client_query_w_array_params.client_query_w_array_params(client)
+    client_query_w_array_params.client_query_w_array_params()
     out, err = capsys.readouterr()
     assert "James" in out
diff --git a/bigquery/samples/tests/test_client_query_w_named_params.py b/bigquery/samples/tests/test_client_query_w_named_params.py
index ae4a2fc27db3..2970dfdc47bd 100644
--- a/bigquery/samples/tests/test_client_query_w_named_params.py
+++ b/bigquery/samples/tests/test_client_query_w_named_params.py
@@ -15,8 +15,8 @@
 from .. import client_query_w_named_params
 
 
-def test_client_query_w_named_params(capsys, client):
+def test_client_query_w_named_params(capsys,):
 
-    client_query_w_named_params.client_query_w_named_params(client)
+    client_query_w_named_params.client_query_w_named_params()
     out, err = capsys.readouterr()
     assert "the" in out
diff --git a/bigquery/samples/tests/test_client_query_w_positional_params.py b/bigquery/samples/tests/test_client_query_w_positional_params.py
index 37c15b67b120..e41ffa825584 100644
--- a/bigquery/samples/tests/test_client_query_w_positional_params.py
+++ b/bigquery/samples/tests/test_client_query_w_positional_params.py
@@ -15,8 +15,8 @@
 from .. import client_query_w_positional_params
 
 
-def test_client_query_w_positional_params(capsys, client):
+def test_client_query_w_positional_params(capsys,):
 
-    client_query_w_positional_params.client_query_w_positional_params(client)
+    client_query_w_positional_params.client_query_w_positional_params()
     out, err = capsys.readouterr()
     assert "the" in out
diff --git a/bigquery/samples/tests/test_client_query_w_struct_params.py b/bigquery/samples/tests/test_client_query_w_struct_params.py
index 9d0c4282946b..03083a3a72c7 100644
--- a/bigquery/samples/tests/test_client_query_w_struct_params.py
+++ b/bigquery/samples/tests/test_client_query_w_struct_params.py
@@ -15,9 +15,9 @@
 from .. import client_query_w_struct_params
 
 
-def test_client_query_w_struct_params(capsys, client):
+def test_client_query_w_struct_params(capsys,):
 
-    client_query_w_struct_params.client_query_w_struct_params(client)
+    client_query_w_struct_params.client_query_w_struct_params()
     out, err = capsys.readouterr()
     assert "1" in out
     assert "foo" in out
diff --git a/bigquery/samples/tests/test_client_query_w_timestamp_params.py b/bigquery/samples/tests/test_client_query_w_timestamp_params.py
index 45f7b7518454..9dddcb9a0e5d 100644
--- a/bigquery/samples/tests/test_client_query_w_timestamp_params.py
+++ b/bigquery/samples/tests/test_client_query_w_timestamp_params.py
@@ -15,8 +15,8 @@
 from .. import client_query_w_timestamp_params
 
 
-def test_client_query_w_timestamp_params(capsys, client):
+def test_client_query_w_timestamp_params(capsys,):
 
-    client_query_w_timestamp_params.client_query_w_timestamp_params(client)
+    client_query_w_timestamp_params.client_query_w_timestamp_params()
     out, err = capsys.readouterr()
     assert "2016, 12, 7, 9, 0" in out
diff --git a/bigquery/samples/tests/test_copy_table.py b/bigquery/samples/tests/test_copy_table.py
index 0138cd8ee1e2..0b95c5443777 100644
--- a/bigquery/samples/tests/test_copy_table.py
+++ b/bigquery/samples/tests/test_copy_table.py
@@ -15,9 +15,9 @@
 from .. import copy_table
 
 
-def test_copy_table(capsys, client, table_with_data_id, random_table_id):
+def test_copy_table(capsys, table_with_data_id, random_table_id, client):
 
-    copy_table.copy_table(client, table_with_data_id, random_table_id)
+    copy_table.copy_table(table_with_data_id, random_table_id)
     out, err = capsys.readouterr()
     assert "A copy of the table created." in out
     assert (
diff --git a/bigquery/samples/tests/test_copy_table_cmek.py b/bigquery/samples/tests/test_copy_table_cmek.py
index 25238071b947..ac04675c989d 100644
--- a/bigquery/samples/tests/test_copy_table_cmek.py
+++ b/bigquery/samples/tests/test_copy_table_cmek.py
@@ -15,12 +15,8 @@
 from .. import copy_table_cmek
 
 
-def test_copy_table_cmek(
-    capsys, client, random_table_id, table_with_data_id, kms_key_name
-):
+def test_copy_table_cmek(capsys, random_table_id, table_with_data_id, kms_key_name):
 
-    copy_table_cmek.copy_table_cmek(
-        client, random_table_id, table_with_data_id, kms_key_name
-    )
+    copy_table_cmek.copy_table_cmek(random_table_id, table_with_data_id, kms_key_name)
     out, err = capsys.readouterr()
     assert "A copy of the table created" in out
diff --git a/bigquery/samples/tests/test_copy_table_multiple_source.py b/bigquery/samples/tests/test_copy_table_multiple_source.py
index 16c1de89627c..45c6d34f5a41 100644
--- a/bigquery/samples/tests/test_copy_table_multiple_source.py
+++ b/bigquery/samples/tests/test_copy_table_multiple_source.py
@@ -18,7 +18,7 @@
 from .. import copy_table_multiple_source
 
 
-def test_copy_table_multiple_source(capsys, client, random_table_id, random_dataset_id):
+def test_copy_table_multiple_source(capsys, random_table_id, random_dataset_id, client):
 
     dataset = bigquery.Dataset(random_dataset_id)
     dataset.location = "US"
@@ -42,9 +42,7 @@ def test_copy_table_multiple_source(capsys, random_table_id, random_dataset_id, client):
         "{}.table2".format(random_dataset_id),
     ]
 
-    copy_table_multiple_source.copy_table_multiple_source(
-        client, random_table_id, table_ids
-    )
+    copy_table_multiple_source.copy_table_multiple_source(random_table_id, table_ids)
     dest_table = client.get_table(random_table_id)
     out, err = capsys.readouterr()
     assert (
diff --git a/bigquery/samples/tests/test_create_dataset.py b/bigquery/samples/tests/test_create_dataset.py
index dfadc67d8468..a000038030e1 100644
--- a/bigquery/samples/tests/test_create_dataset.py
+++ b/bigquery/samples/tests/test_create_dataset.py
@@ -15,8 +15,8 @@
 from .. import create_dataset
 
 
-def test_create_dataset(capsys, client, random_dataset_id):
+def test_create_dataset(capsys, random_dataset_id):
 
-    create_dataset.create_dataset(client, random_dataset_id)
+    create_dataset.create_dataset(random_dataset_id)
     out, err = capsys.readouterr()
     assert "Created dataset {}".format(random_dataset_id) in out
diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py
index bbf880cbe402..eab4b3e485f9 100644
--- a/bigquery/samples/tests/test_create_job.py
+++ b/bigquery/samples/tests/test_create_job.py
@@ -16,8 +16,7 @@
 
 
 def test_create_job(capsys, client):
-
-    query_job = create_job.create_job(client)
+    query_job = create_job.create_job()
     client.cancel_job(query_job.job_id, location=query_job.location)
     out, err = capsys.readouterr()
     assert "Started job: {}".format(query_job.job_id) in out
diff --git a/bigquery/samples/tests/test_create_table.py b/bigquery/samples/tests/test_create_table.py
index 093ee6e94277..48e52889acce 100644
--- a/bigquery/samples/tests/test_create_table.py
+++ b/bigquery/samples/tests/test_create_table.py
@@ -15,7 +15,7 @@
 from .. import create_table
 
 
-def test_create_table(capsys, client, random_table_id):
-    create_table.create_table(client, random_table_id)
+def test_create_table(capsys, random_table_id):
+    create_table.create_table(random_table_id)
     out, err = capsys.readouterr()
     assert "Created table {}".format(random_table_id) in out
diff --git a/bigquery/samples/tests/test_create_table_range_partitioned.py b/bigquery/samples/tests/test_create_table_range_partitioned.py
index ac312b033832..9745966bf02b 100644
--- a/bigquery/samples/tests/test_create_table_range_partitioned.py
+++ b/bigquery/samples/tests/test_create_table_range_partitioned.py
@@ -15,9 +15,9 @@
 from .. import create_table_range_partitioned
 
 
-def test_create_table_range_partitioned(capsys, client, random_table_id):
+def test_create_table_range_partitioned(capsys, random_table_id):
     table = create_table_range_partitioned.create_table_range_partitioned(
-        client, random_table_id
+        random_table_id
    )
     out, _ = capsys.readouterr()
     assert "Created table {}".format(random_table_id) in out
diff --git a/bigquery/samples/tests/test_dataset_exists.py b/bigquery/samples/tests/test_dataset_exists.py
index a44e60371120..6bc38b4d27ce 100644
--- a/bigquery/samples/tests/test_dataset_exists.py
+++ b/bigquery/samples/tests/test_dataset_exists.py
@@ -17,13 +17,13 @@
 from .. import dataset_exists
 
 
-def test_dataset_exists(capsys, client, random_dataset_id):
+def test_dataset_exists(capsys, random_dataset_id, client):
 
-    dataset_exists.dataset_exists(client, random_dataset_id)
+    dataset_exists.dataset_exists(random_dataset_id)
     out, err = capsys.readouterr()
     assert "Dataset {} is not found".format(random_dataset_id) in out
 
     dataset = bigquery.Dataset(random_dataset_id)
     dataset = client.create_dataset(dataset)
 
-    dataset_exists.dataset_exists(client, random_dataset_id)
+    dataset_exists.dataset_exists(random_dataset_id)
     out, err = capsys.readouterr()
     assert "Dataset {} already exists".format(random_dataset_id) in out
diff --git a/bigquery/samples/tests/test_dataset_label_samples.py b/bigquery/samples/tests/test_dataset_label_samples.py
index 94a2092407b0..0dbb2a76bdd9 100644
--- a/bigquery/samples/tests/test_dataset_label_samples.py
+++ b/bigquery/samples/tests/test_dataset_label_samples.py
@@ -17,17 +17,17 @@
 from .. import label_dataset
 
 
-def test_dataset_label_samples(capsys, client, dataset_id):
+def test_dataset_label_samples(capsys, dataset_id):
 
-    label_dataset.label_dataset(client, dataset_id)
+    label_dataset.label_dataset(dataset_id)
     out, err = capsys.readouterr()
     assert "Labels added to {}".format(dataset_id) in out
 
-    get_dataset_labels.get_dataset_labels(client, dataset_id)
+    get_dataset_labels.get_dataset_labels(dataset_id)
     out, err = capsys.readouterr()
     assert "color: green" in out
 
-    dataset = delete_dataset_labels.delete_dataset_labels(client, dataset_id)
+    dataset = delete_dataset_labels.delete_dataset_labels(dataset_id)
     out, err = capsys.readouterr()
     assert "Labels deleted from {}".format(dataset_id) in out
     assert dataset.labels.get("color") is None
diff --git a/bigquery/samples/tests/test_delete_dataset.py b/bigquery/samples/tests/test_delete_dataset.py
index 2b1b6ad06195..1f9b3c823fb9 100644
--- a/bigquery/samples/tests/test_delete_dataset.py
+++ b/bigquery/samples/tests/test_delete_dataset.py
@@ -15,8 +15,8 @@
 from .. import delete_dataset
 
 
-def test_delete_dataset(capsys, client, dataset_id):
+def test_delete_dataset(capsys, dataset_id):
 
-    delete_dataset.delete_dataset(client, dataset_id)
+    delete_dataset.delete_dataset(dataset_id)
     out, err = capsys.readouterr()
     assert "Deleted dataset '{}'.".format(dataset_id) in out
diff --git a/bigquery/samples/tests/test_delete_table.py b/bigquery/samples/tests/test_delete_table.py
index 8f4796623a83..7065743b0485 100644
--- a/bigquery/samples/tests/test_delete_table.py
+++ b/bigquery/samples/tests/test_delete_table.py
@@ -15,8 +15,8 @@
 from .. import delete_table
 
 
-def test_delete_table(capsys, client, table_id):
+def test_delete_table(capsys, table_id):
 
-    delete_table.delete_table(client, table_id)
+    delete_table.delete_table(table_id)
     out, err = capsys.readouterr()
     assert "Deleted table '{}'.".format(table_id) in out
diff --git a/bigquery/samples/tests/test_download_public_data.py b/bigquery/samples/tests/test_download_public_data.py
index 8ee0e6a68c17..82297b2032f6 100644
--- a/bigquery/samples/tests/test_download_public_data.py
+++ b/bigquery/samples/tests/test_download_public_data.py
@@ -17,11 +17,11 @@
 from .. import download_public_data
 
 
-def test_download_public_data(caplog, capsys, client):
+def test_download_public_data(caplog, capsys):
     # Enable debug-level logging to verify the BigQuery Storage API is used.
     caplog.set_level(logging.DEBUG)
 
-    download_public_data.download_public_data(client)
+    download_public_data.download_public_data()
     out, _ = capsys.readouterr()
     assert "year" in out
     assert "gender" in out
diff --git a/bigquery/samples/tests/test_download_public_data_sandbox.py b/bigquery/samples/tests/test_download_public_data_sandbox.py
index 74dadc1db3fb..e322cb2e54c9 100644
--- a/bigquery/samples/tests/test_download_public_data_sandbox.py
+++ b/bigquery/samples/tests/test_download_public_data_sandbox.py
@@ -17,11 +17,11 @@
 from .. import download_public_data_sandbox
 
 
-def test_download_public_data_sandbox(caplog, capsys, client):
+def test_download_public_data_sandbox(caplog, capsys):
     # Enable debug-level logging to verify the BigQuery Storage API is used.
     caplog.set_level(logging.DEBUG)

-    download_public_data_sandbox.download_public_data_sandbox(client)
+    download_public_data_sandbox.download_public_data_sandbox()
     out, err = capsys.readouterr()
     assert "year" in out
     assert "gender" in out
diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py
index dedec1d7b29e..3afdb00d39bd 100644
--- a/bigquery/samples/tests/test_get_dataset.py
+++ b/bigquery/samples/tests/test_get_dataset.py
@@ -15,8 +15,8 @@
 from .. import get_dataset


-def test_get_dataset(capsys, client, dataset_id):
+def test_get_dataset(capsys, dataset_id):

-    get_dataset.get_dataset(client, dataset_id)
+    get_dataset.get_dataset(dataset_id)
     out, err = capsys.readouterr()
     assert dataset_id in out
diff --git a/bigquery/samples/tests/test_get_table.py b/bigquery/samples/tests/test_get_table.py
index b950d434aef6..8bbd0681b584 100644
--- a/bigquery/samples/tests/test_get_table.py
+++ b/bigquery/samples/tests/test_get_table.py
@@ -17,7 +17,7 @@
 from .. import get_table


-def test_get_table(capsys, client, random_table_id):
+def test_get_table(capsys, random_table_id, client):

     schema = [
         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
@@ -28,7 +28,7 @@ def test_get_table(capsys, client, random_table_id):
     table.description = "Sample Table"
     table = client.create_table(table)

-    get_table.get_table(client, random_table_id)
+    get_table.get_table(random_table_id)
     out, err = capsys.readouterr()
     assert "Got table '{}'.".format(random_table_id) in out
     assert "full_name" in out
diff --git a/bigquery/samples/tests/test_list_datasets.py b/bigquery/samples/tests/test_list_datasets.py
index 4c66a24f9b1a..1610d0e4a3ba 100644
--- a/bigquery/samples/tests/test_list_datasets.py
+++ b/bigquery/samples/tests/test_list_datasets.py
@@ -15,8 +15,7 @@
 from .. import list_datasets


-def test_list_datasets(capsys, client, dataset_id):
-
-    list_datasets.list_datasets(client)
+def test_list_datasets(capsys, dataset_id, client):
+    list_datasets.list_datasets()
     out, err = capsys.readouterr()
     assert "Datasets in project {}:".format(client.project) in out
diff --git a/bigquery/samples/tests/test_list_datasets_by_label.py b/bigquery/samples/tests/test_list_datasets_by_label.py
index 6d04a281ff42..5b375f4f4ee5 100644
--- a/bigquery/samples/tests/test_list_datasets_by_label.py
+++ b/bigquery/samples/tests/test_list_datasets_by_label.py
@@ -15,11 +15,10 @@
 from .. import list_datasets_by_label


-def test_list_datasets_by_label(capsys, client, dataset_id):
-
+def test_list_datasets_by_label(capsys, dataset_id, client):
     dataset = client.get_dataset(dataset_id)
     dataset.labels = {"color": "green"}
     dataset = client.update_dataset(dataset, ["labels"])
-    list_datasets_by_label.list_datasets_by_label(client)
+    list_datasets_by_label.list_datasets_by_label()
     out, err = capsys.readouterr()
     assert dataset_id in out
diff --git a/bigquery/samples/tests/test_list_tables.py b/bigquery/samples/tests/test_list_tables.py
index ec1621ac7579..f9426aa53d21 100644
--- a/bigquery/samples/tests/test_list_tables.py
+++ b/bigquery/samples/tests/test_list_tables.py
@@ -15,9 +15,9 @@
 from .. import list_tables


-def test_list_tables(capsys, client, dataset_id, table_id):
+def test_list_tables(capsys, dataset_id, table_id):

-    list_tables.list_tables(client, dataset_id)
+    list_tables.list_tables(dataset_id)
     out, err = capsys.readouterr()
     assert "Tables contained in '{}':".format(dataset_id) in out
     assert table_id in out
diff --git a/bigquery/samples/tests/test_load_table_dataframe.py b/bigquery/samples/tests/test_load_table_dataframe.py
index 3b7cb16ea692..17ec1114dc72 100644
--- a/bigquery/samples/tests/test_load_table_dataframe.py
+++ b/bigquery/samples/tests/test_load_table_dataframe.py
@@ -21,9 +21,9 @@
 pyarrow = pytest.importorskip("pyarrow")


-def test_load_table_dataframe(capsys, client, random_table_id):
+def test_load_table_dataframe(capsys, random_table_id):

-    table = load_table_dataframe.load_table_dataframe(client, random_table_id)
+    table = load_table_dataframe.load_table_dataframe(random_table_id)
     out, _ = capsys.readouterr()
     assert "Loaded 4 rows and 3 columns" in out
diff --git a/bigquery/samples/tests/test_model_samples.py b/bigquery/samples/tests/test_model_samples.py
index d7b06a92a3e1..ebefad846642 100644
--- a/bigquery/samples/tests/test_model_samples.py
+++ b/bigquery/samples/tests/test_model_samples.py
@@ -18,22 +18,22 @@
 from .. import update_model


-def test_model_samples(capsys, client, dataset_id, model_id):
+def test_model_samples(capsys, dataset_id, model_id):
     """Since creating a model is a long operation, test all model samples in
     the same test, following a typical end-to-end flow.
     """
-    get_model.get_model(client, model_id)
+    get_model.get_model(model_id)
     out, err = capsys.readouterr()
     assert model_id in out

-    list_models.list_models(client, dataset_id)
+    list_models.list_models(dataset_id)
     out, err = capsys.readouterr()
     assert "Models contained in '{}':".format(dataset_id) in out

-    update_model.update_model(client, model_id)
+    update_model.update_model(model_id)
     out, err = capsys.readouterr()
     assert "This model was modified from a Python program." in out

-    delete_model.delete_model(client, model_id)
+    delete_model.delete_model(model_id)
     out, err = capsys.readouterr()
     assert "Deleted model '{}'.".format(model_id) in out
diff --git a/bigquery/samples/tests/test_query_external_gcs_temporary_table.py b/bigquery/samples/tests/test_query_external_gcs_temporary_table.py
index ea5b5d4dfcda..022b327be21b 100644
--- a/bigquery/samples/tests/test_query_external_gcs_temporary_table.py
+++ b/bigquery/samples/tests/test_query_external_gcs_temporary_table.py
@@ -15,8 +15,8 @@
 from .. import query_external_gcs_temporary_table


-def test_query_external_gcs_temporary_table(capsys, client):
+def test_query_external_gcs_temporary_table(capsys):

-    query_external_gcs_temporary_table.query_external_gcs_temporary_table(client)
+    query_external_gcs_temporary_table.query_external_gcs_temporary_table()
     out, err = capsys.readouterr()
     assert "There are 4 states with names starting with W." in out
diff --git a/bigquery/samples/tests/test_query_no_cache.py b/bigquery/samples/tests/test_query_no_cache.py
index 68f0774d935f..df17d0d0b04f 100644
--- a/bigquery/samples/tests/test_query_no_cache.py
+++ b/bigquery/samples/tests/test_query_no_cache.py
@@ -17,8 +17,8 @@
 from .. import query_no_cache


-def test_query_no_cache(capsys, client):
+def test_query_no_cache(capsys):

-    query_no_cache.query_no_cache(client)
+    query_no_cache.query_no_cache()
     out, err = capsys.readouterr()
     assert re.search(r"(Row[\w(){}:', ]+)$", out)
diff --git a/bigquery/samples/tests/test_query_pagination.py b/bigquery/samples/tests/test_query_pagination.py
index 93352fa0bf1d..7ab049c8ce7c 100644
--- a/bigquery/samples/tests/test_query_pagination.py
+++ b/bigquery/samples/tests/test_query_pagination.py
@@ -15,9 +15,9 @@
 from .. import query_pagination


-def test_query_pagination(capsys, client):
+def test_query_pagination(capsys):

-    query_pagination.query_pagination(client)
+    query_pagination.query_pagination()
     out, _ = capsys.readouterr()
     assert "The query data:" in out
     assert "name=James, count=4942431" in out
diff --git a/bigquery/samples/tests/test_query_script.py b/bigquery/samples/tests/test_query_script.py
index 70bb9df76fd4..037664d369ee 100644
--- a/bigquery/samples/tests/test_query_script.py
+++ b/bigquery/samples/tests/test_query_script.py
@@ -15,9 +15,9 @@
 from .. import query_script


-def test_query_script(capsys, client):
+def test_query_script(capsys):

-    query_script.query_script(client)
+    query_script.query_script()
     out, _ = capsys.readouterr()
     assert "Script created 2 child jobs." in out
     assert (
diff --git a/bigquery/samples/tests/test_query_to_arrow.py b/bigquery/samples/tests/test_query_to_arrow.py
index dd9b3ab508cc..77d3f7130305 100644
--- a/bigquery/samples/tests/test_query_to_arrow.py
+++ b/bigquery/samples/tests/test_query_to_arrow.py
@@ -17,9 +17,9 @@
 from .. import query_to_arrow


-def test_query_to_arrow(capsys, client):
+def test_query_to_arrow(capsys):

-    arrow_table = query_to_arrow.query_to_arrow(client)
+    arrow_table = query_to_arrow.query_to_arrow()
     out, err = capsys.readouterr()
     assert "Downloaded 8 rows, 2 columns." in out
     arrow_schema = arrow_table.schema
diff --git a/bigquery/samples/tests/test_routine_samples.py b/bigquery/samples/tests/test_routine_samples.py
index 81d33a0cf5df..a4467c59a896 100644
--- a/bigquery/samples/tests/test_routine_samples.py
+++ b/bigquery/samples/tests/test_routine_samples.py
@@ -16,18 +16,18 @@
 from google.cloud import bigquery_v2


-def test_create_routine(capsys, client, random_routine_id):
+def test_create_routine(capsys, random_routine_id):
     from .. import create_routine

-    create_routine.create_routine(client, random_routine_id)
+    create_routine.create_routine(random_routine_id)
     out, err = capsys.readouterr()
     assert "Created routine {}".format(random_routine_id) in out


-def test_create_routine_ddl(capsys, client, random_routine_id):
+def test_create_routine_ddl(capsys, random_routine_id, client):
     from .. import create_routine_ddl

-    create_routine_ddl.create_routine_ddl(client, random_routine_id)
+    create_routine_ddl.create_routine_ddl(random_routine_id)
     routine = client.get_routine(random_routine_id)

     out, err = capsys.readouterr()
@@ -65,19 +65,19 @@ def test_create_routine_ddl(capsys, client, random_routine_id):
     assert routine.arguments == expected_arguments


-def test_list_routines(capsys, client, dataset_id, routine_id):
+def test_list_routines(capsys, dataset_id, routine_id):
     from .. import list_routines

-    list_routines.list_routines(client, dataset_id)
+    list_routines.list_routines(dataset_id)
     out, err = capsys.readouterr()
     assert "Routines contained in dataset {}:".format(dataset_id) in out
     assert routine_id in out


-def test_get_routine(capsys, client, routine_id):
+def test_get_routine(capsys, routine_id):
     from .. import get_routine

-    get_routine.get_routine(client, routine_id)
+    get_routine.get_routine(routine_id)
     out, err = capsys.readouterr()
     assert "Routine '{}':".format(routine_id) in out
     assert "Type: 'SCALAR_FUNCTION'" in out
@@ -86,16 +86,16 @@
     assert "Type: 'type_kind: INT64\n'" in out


-def test_delete_routine(capsys, client, routine_id):
+def test_delete_routine(capsys, routine_id):
     from .. import delete_routine

-    delete_routine.delete_routine(client, routine_id)
+    delete_routine.delete_routine(routine_id)
     out, err = capsys.readouterr()
     assert "Deleted routine {}.".format(routine_id) in out


-def test_update_routine(client, routine_id):
+def test_update_routine(routine_id):
     from .. import update_routine

-    routine = update_routine.update_routine(client, routine_id)
+    routine = update_routine.update_routine(routine_id)
     assert routine.body == "x * 4"
diff --git a/bigquery/samples/tests/test_table_exists.py b/bigquery/samples/tests/test_table_exists.py
index ae4fc65f847c..d1f579a64528 100644
--- a/bigquery/samples/tests/test_table_exists.py
+++ b/bigquery/samples/tests/test_table_exists.py
@@ -17,13 +17,13 @@
 from .. import table_exists


-def test_table_exists(capsys, client, random_table_id):
+def test_table_exists(capsys, random_table_id, client):

-    table_exists.table_exists(client, random_table_id)
+    table_exists.table_exists(random_table_id)
     out, err = capsys.readouterr()
     assert "Table {} is not found.".format(random_table_id) in out
     table = bigquery.Table(random_table_id)
     table = client.create_table(table)
-    table_exists.table_exists(client, random_table_id)
+    table_exists.table_exists(random_table_id)
     out, err = capsys.readouterr()
     assert "Table {} already exists.".format(random_table_id) in out
diff --git a/bigquery/samples/tests/test_table_insert_rows.py b/bigquery/samples/tests/test_table_insert_rows.py
index 9c5fd5768cfb..72b51df9c485 100644
--- a/bigquery/samples/tests/test_table_insert_rows.py
+++ b/bigquery/samples/tests/test_table_insert_rows.py
@@ -17,7 +17,7 @@
 from .. import table_insert_rows


-def test_table_insert_rows(capsys, client, random_table_id):
+def test_table_insert_rows(capsys, random_table_id, client):

     schema = [
         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
@@ -27,6 +27,6 @@ def test_table_insert_rows(capsys, client, random_table_id):
     table = bigquery.Table(random_table_id, schema=schema)
     table = client.create_table(table)

-    table_insert_rows.table_insert_rows(client, random_table_id)
+    table_insert_rows.table_insert_rows(random_table_id)
     out, err = capsys.readouterr()
     assert "New rows have been added." in out
diff --git a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py
index a2a4febd7f75..c6199894a72c 100644
--- a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py
+++ b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py
@@ -17,7 +17,7 @@
 from .. import table_insert_rows_explicit_none_insert_ids as mut


-def test_table_insert_rows_explicit_none_insert_ids(capsys, client, random_table_id):
+def test_table_insert_rows_explicit_none_insert_ids(capsys, random_table_id, client):

     schema = [
         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
@@ -27,6 +27,6 @@ def test_table_insert_rows_explicit_none_insert_ids(capsys, random_table
     table = bigquery.Table(random_table_id, schema=schema)
     table = client.create_table(table)

-    mut.table_insert_rows_explicit_none_insert_ids(client, random_table_id)
+    mut.table_insert_rows_explicit_none_insert_ids(random_table_id)
     out, err = capsys.readouterr()
     assert "New rows have been added." in out
diff --git a/bigquery/samples/tests/test_undelete_table.py b/bigquery/samples/tests/test_undelete_table.py
index 8fd221a39b30..a070abdbd36b 100644
--- a/bigquery/samples/tests/test_undelete_table.py
+++ b/bigquery/samples/tests/test_undelete_table.py
@@ -15,8 +15,8 @@
 from .. import undelete_table


-def test_undelete_table(capsys, client, table_with_schema_id, random_table_id):
-    undelete_table.undelete_table(client, table_with_schema_id, random_table_id)
+def test_undelete_table(capsys, table_with_schema_id, random_table_id):
+    undelete_table.undelete_table(table_with_schema_id, random_table_id)
     out, _ = capsys.readouterr()
     assert (
         "Copied data from deleted table {} to {}".format(
diff --git a/bigquery/samples/tests/test_update_dataset_access.py b/bigquery/samples/tests/test_update_dataset_access.py
index ae33dbfe4a4c..4c0aa835baf0 100644
--- a/bigquery/samples/tests/test_update_dataset_access.py
+++ b/bigquery/samples/tests/test_update_dataset_access.py
@@ -15,9 +15,9 @@
 from .. import update_dataset_access


-def test_update_dataset_access(capsys, client, dataset_id):
+def test_update_dataset_access(capsys, dataset_id):

-    update_dataset_access.update_dataset_access(client, dataset_id)
+    update_dataset_access.update_dataset_access(dataset_id)
     out, err = capsys.readouterr()
     assert (
         "Updated dataset '{}' with modified user permissions.".format(dataset_id) in out
diff --git a/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py
index 55fa4b0d96fb..a5a8e6b5202c 100644
--- a/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py
+++ b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py
@@ -15,12 +15,12 @@
 from .. import update_dataset_default_partition_expiration


-def test_update_dataset_default_partition_expiration(capsys, client, dataset_id):
+def test_update_dataset_default_partition_expiration(capsys, dataset_id):

     ninety_days_ms = 90 * 24 * 60 * 60 * 1000  # in milliseconds

     update_dataset_default_partition_expiration.update_dataset_default_partition_expiration(
-        client, dataset_id
+        dataset_id
     )
     out, _ = capsys.readouterr()
     assert (
diff --git a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py
index 46e9654209ed..b0f7013228e6 100644
--- a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py
+++ b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py
@@ -15,12 +15,12 @@
 from .. import update_dataset_default_table_expiration


-def test_update_dataset_default_table_expiration(capsys, client, dataset_id):
+def test_update_dataset_default_table_expiration(capsys, dataset_id):

     one_day_ms = 24 * 60 * 60 * 1000  # in milliseconds

     update_dataset_default_table_expiration.update_dataset_default_table_expiration(
-        client, dataset_id
+        dataset_id
     )
     out, err = capsys.readouterr()
     assert (
diff --git a/bigquery/samples/tests/test_update_dataset_description.py b/bigquery/samples/tests/test_update_dataset_description.py
index c6f8889f50da..e4ff586c7bc2 100644
--- a/bigquery/samples/tests/test_update_dataset_description.py
+++ b/bigquery/samples/tests/test_update_dataset_description.py
@@ -15,8 +15,8 @@
 from .. import update_dataset_description


-def test_update_dataset_description(capsys, client, dataset_id):
+def test_update_dataset_description(capsys, dataset_id):

-    update_dataset_description.update_dataset_description(client, dataset_id)
+    update_dataset_description.update_dataset_description(dataset_id)
     out, err = capsys.readouterr()
     assert "Updated description." in out
diff --git a/bigquery/samples/tests/test_update_table_require_partition_filter.py b/bigquery/samples/tests/test_update_table_require_partition_filter.py
index 7ce6d64c780a..7e9ca6f2b44f 100644
--- a/bigquery/samples/tests/test_update_table_require_partition_filter.py
+++ b/bigquery/samples/tests/test_update_table_require_partition_filter.py
@@ -17,7 +17,7 @@
 from .. import update_table_require_partition_filter


-def test_update_table_require_partition_filter(capsys, client, random_table_id):
+def test_update_table_require_partition_filter(capsys, random_table_id, client):

     # Make a partitioned table.
     schema = [bigquery.SchemaField("transaction_timestamp", "TIMESTAMP")]
@@ -26,7 +26,7 @@ def test_update_table_require_partition_filter(capsys, random_table_id):
     table = client.create_table(table)

     update_table_require_partition_filter.update_table_require_partition_filter(
-        client, random_table_id
+        random_table_id
     )
     out, _ = capsys.readouterr()
     assert (
diff --git a/bigquery/samples/undelete_table.py b/bigquery/samples/undelete_table.py
index 9db9712d2a74..18b15801ffee 100644
--- a/bigquery/samples/undelete_table.py
+++ b/bigquery/samples/undelete_table.py
@@ -15,15 +15,14 @@
 from google.api_core import datetime_helpers


-def undelete_table(client, table_id, recovered_table_id):
+def undelete_table(table_id, recovered_table_id):
     # [START bigquery_undelete_table]
     import time

-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery
+    from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Choose a table to recover.
     # table_id = "your-project.your_dataset.your_table"
diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py
index 134cf1b940cf..6e844cc90799 100644
--- a/bigquery/samples/update_dataset_access.py
+++ b/bigquery/samples/update_dataset_access.py
@@ -13,13 +13,13 @@
 # limitations under the License.


-def update_dataset_access(client, dataset_id):
+def update_dataset_access(dataset_id):

     # [START bigquery_update_dataset_access]
     from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
diff --git a/bigquery/samples/update_dataset_default_partition_expiration.py b/bigquery/samples/update_dataset_default_partition_expiration.py
index 502d52ff199b..18cfb92db9b4 100644
--- a/bigquery/samples/update_dataset_default_partition_expiration.py
+++ b/bigquery/samples/update_dataset_default_partition_expiration.py
@@ -13,14 +13,14 @@
 # limitations under the License.


-def update_dataset_default_partition_expiration(client, dataset_id):
+def update_dataset_default_partition_expiration(dataset_id):

     # [START bigquery_update_dataset_partition_expiration]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py
index 8de354b1f21b..b7e5cea9b20d 100644
--- a/bigquery/samples/update_dataset_default_table_expiration.py
+++ b/bigquery/samples/update_dataset_default_table_expiration.py
@@ -13,14 +13,14 @@
 # limitations under the License.


-def update_dataset_default_table_expiration(client, dataset_id):
+def update_dataset_default_table_expiration(dataset_id):

     # [START bigquery_update_dataset_expiration]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py
index 08eed8da2b64..0732b1c618e8 100644
--- a/bigquery/samples/update_dataset_description.py
+++ b/bigquery/samples/update_dataset_description.py
@@ -13,14 +13,14 @@
 # limitations under the License.


-def update_dataset_description(client, dataset_id):
+def update_dataset_description(dataset_id):

     # [START bigquery_update_dataset_description]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py
index 7583c410e1ef..db262d8cc43c 100644
--- a/bigquery/samples/update_model.py
+++ b/bigquery/samples/update_model.py
@@ -13,15 +13,15 @@
 # limitations under the License.


-def update_model(client, model_id):
+def update_model(model_id):
     """Sample ID: go/samples-tracker/1533"""

     # [START bigquery_update_model_description]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py
index 4489d68f7ee4..61c6855b5041 100644
--- a/bigquery/samples/update_routine.py
+++ b/bigquery/samples/update_routine.py
@@ -13,14 +13,14 @@
 # limitations under the License.


-def update_routine(client, routine_id):
+def update_routine(routine_id):

     # [START bigquery_update_routine]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set the fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
diff --git a/bigquery/samples/update_table_require_partition_filter.py b/bigquery/samples/update_table_require_partition_filter.py
index 4c6be2d2cedc..cf1d532774b2 100644
--- a/bigquery/samples/update_table_require_partition_filter.py
+++ b/bigquery/samples/update_table_require_partition_filter.py
@@ -13,14 +13,14 @@
 # limitations under the License.


-def update_table_require_partition_filter(client, table_id):
+def update_table_require_partition_filter(table_id):

     # [START bigquery_update_table_require_partition_filter]
-    # TODO(developer): Import the client library.
-    # from google.cloud import bigquery

-    # TODO(developer): Construct a BigQuery client object.
-    # client = bigquery.Client()
+    from google.cloud import bigquery
+
+    # Construct a BigQuery client object.
+    client = bigquery.Client()

     # TODO(developer): Set table_id to the ID of the table to fetch.
     # table_id = 'your-project.your_dataset.your_table'
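
The pattern in this change is uniform: each sample now builds its own bigquery.Client() inside its [START]/[END] region so the published snippet is self-contained and copy-paste runnable, while the tests keep the shared client fixture only where they must create or inspect resources around the sample call (for example client.create_table or client.get_routine). The sketch below is a minimal, hypothetical conftest.py illustrating the kind of fixtures these tests request; the repository's actual fixture names, scopes, and cleanup logic may differ.

import uuid

import pytest
from google.cloud import bigquery


@pytest.fixture(scope="module")
def client():
    # Shared client for test setup, teardown, and assertions only; the
    # samples under test construct their own bigquery.Client() internally.
    return bigquery.Client()


@pytest.fixture
def random_dataset_id(client):
    # Fully qualified dataset ID; the dataset (if the test created it) is
    # removed after the test finishes.
    dataset_id = "{}.sample_test_{}".format(client.project, uuid.uuid4().hex)
    yield dataset_id
    client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)

With fixtures like these, dropping the client argument reduces most tests to a one-line sample call plus assertions, and the only tests that still request client are those that touch the API directly before or after invoking the sample.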