diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
index d76692635508..26f79be6f754 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
@@ -139,6 +139,16 @@ public Table build() {
/**
* Checks if this table exists.
*
+ *
+ * <p>Example of checking if the table exists.
+ *
+ * <pre> {@code
+ * boolean exists = table.exists();
+ * if (exists) {
+ * // the table exists
+ * } else {
+ * // the table was not found
+ * }
+ * }</pre>
+ *
* @return {@code true} if this table exists, {@code false} otherwise
* @throws BigQueryException upon failure
*/
@@ -149,6 +159,17 @@ public boolean exists() {
/**
* Fetches current table's latest information. Returns {@code null} if the table does not exist.
*
+ * <p>Example of fetching the table's latest information, specifying particular table fields to
+ * get.
+ *
+ * <pre> {@code
+ * TableField field1 = TableField.LAST_MODIFIED_TIME;
+ * TableField field2 = TableField.NUM_ROWS;
+ * Table latestTable = table.reload(TableOption.fields(field1, field2));
+ * if (latestTable == null) {
+ * // the table was not found
+ * }
+ * }</pre>
+ *
* @param options table options
* @return a {@code Table} object with latest information or {@code null} if not found
* @throws BigQueryException upon failure
@@ -161,6 +182,11 @@ public Table reload(TableOption... options) {
* Updates the table's information with this table's information. Dataset's and table's
* user-defined ids cannot be changed. A new {@code Table} object is returned.
*
+ * <p>Example of updating the table's information.
+ *
+ * <pre> {@code
+ * Table updatedTable = table.toBuilder().description("new description").build().update();
+ * }</pre>
+ *
* @param options dataset options
* @return a {@code Table} object with updated information
* @throws BigQueryException upon failure
@@ -172,6 +198,16 @@ public Table update(TableOption... options) {
/**
* Deletes this table.
*
+ * <p>Example of deleting the table.
+ *
+ * <pre> {@code
+ * boolean deleted = table.delete();
+ * if (deleted) {
+ * // the table was deleted
+ * } else {
+ * // the table was not found
+ * }
+ * }</pre>
+ *
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -182,6 +218,23 @@ public boolean delete() {
/**
* Insert rows into the table.
*
+ * <p>Example of inserting rows into the table.
+ *
+ * <pre> {@code
+ * String rowId1 = "rowId1";
+ * String rowId2 = "rowId2";
+ * List<RowToInsert> rows = new ArrayList<>();
+ * Map<String, Object> row1 = new HashMap<>();
+ * row1.put("stringField", "value1");
+ * row1.put("booleanField", true);
+ * Map<String, Object> row2 = new HashMap<>();
+ * row2.put("stringField", "value2");
+ * row2.put("booleanField", false);
+ * rows.add(RowToInsert.of(rowId1, row1));
+ * rows.add(RowToInsert.of(rowId2, row2));
+ * InsertAllResponse response = table.insert(rows);
+ * // do something with response
+ * }</pre>
+ *
* @param rows rows to be inserted
* @throws BigQueryException upon failure
*/
@@ -193,6 +246,23 @@ public InsertAllResponse insert(Iterable<RowToInsert> rows)
/**
* Insert rows into the table.
*
+ * <p>Example of inserting rows into the table, ignoring invalid rows.
+ *
+ * <pre> {@code
+ * String rowId1 = "rowId1";
+ * String rowId2 = "rowId2";
+ * List<RowToInsert> rows = new ArrayList<>();
+ * Map<String, Object> row1 = new HashMap<>();
+ * row1.put("stringField", 1);
+ * row1.put("booleanField", true);
+ * Map<String, Object> row2 = new HashMap<>();
+ * row2.put("stringField", "value2");
+ * row2.put("booleanField", false);
+ * rows.add(RowToInsert.of(rowId1, row1));
+ * rows.add(RowToInsert.of(rowId2, row2));
+ * InsertAllResponse response = table.insert(rows, true, true);
+ * // do something with response
+ * }</pre>
+ *
* @param rows rows to be inserted
* @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set
* the entire insert operation will fail if rows to be inserted contain an invalid row
@@ -202,7 +272,7 @@ public InsertAllResponse insert(Iterable<RowToInsert> rows)
* @throws BigQueryException upon failure
*/
 public InsertAllResponse insert(Iterable<RowToInsert> rows,
- boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException {
+ boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException {
InsertAllRequest request = InsertAllRequest.builder(tableId(), rows)
.skipInvalidRows(skipInvalidRows)
.ignoreUnknownValues(ignoreUnknownValues)
@@ -213,6 +283,16 @@ public InsertAllResponse insert(Iterable<RowToInsert> rows,
/**
* Returns the paginated list rows in this table.
*
+ * <p>Example of listing rows in the table.
+ *
+ * <pre> {@code
+ * Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
+ * Iterator<List<FieldValue>> rowIterator = page.iterateAll();
+ * while (rowIterator.hasNext()) {
+ * List<FieldValue> row = rowIterator.next();
+ * // do something with the row
+ * }
+ * }</pre>
+ *
* @param options table data list options
* @throws BigQueryException upon failure
*/
@@ -225,6 +305,25 @@ public Page<List<FieldValue>> list(TableDataListOption... options)
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
+ * <p>Example of copying the table to a destination table.
+ *
+ * <pre> {@code
+ * String datasetName = "my_dataset";
+ * String tableName = "my_destination_table";
+ * Job job = table.copy(datasetName, tableName);
+ * // Wait for the job to complete.
+ * try {
+ * Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ * WaitForOption.timeout(3, TimeUnit.MINUTES));
+ * if (completedJob != null && completedJob.status().error() == null) {
+ * // Job completed successfully
+ * } else {
+ * // Handle error case
+ * }
+ * } catch (InterruptedException | TimeoutException e) {
+ * // Handle interrupted wait
+ * }
+ * }</pre>
+ *
* @param destinationDataset the user-defined id of the destination dataset
* @param destinationTable the user-defined id of the destination table
* @param options job options
@@ -239,6 +338,27 @@ public Job copy(String destinationDataset, String destinationTable, JobOption...
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
+ * <p>Example copying the table to a destination table.
+ *
+ * <pre> {@code
+ * String dataset = "my_dataset";
+ * String tableName = "my_destination_table";
+ * TableId destinationId = TableId.of(dataset, tableName);
+ * JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
+ * Job job = table.copy(destinationId, options);
+ * // Wait for the job to complete.
+ * try {
+ * Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ * WaitForOption.timeout(3, TimeUnit.MINUTES));
+ * if (completedJob != null && completedJob.status().error() == null) {
+ * // Job completed successfully.
+ * } else {
+ * // Handle error case.
+ * }
+ * } catch (InterruptedException | TimeoutException e) {
+ * // Handle interrupted wait
+ * }
+ * }</pre>
+ *
* @param destinationTable the destination table of the copy job
* @param options job options
* @throws BigQueryException upon failure
@@ -253,6 +373,25 @@ public Job copy(TableId destinationTable, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the
* started {@link Job} object.
*
+ * <p>Example extracting data to single Google Cloud Storage file.
+ *
+ * <pre> {@code
+ * String format = "CSV";
+ * String gcsUrl = "gs://my_bucket/filename.csv";
+ * Job job = table.extract(format, gcsUrl);
+ * // Wait for the job to complete
+ * try {
+ * Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ * WaitForOption.timeout(3, TimeUnit.MINUTES));
+ * if (completedJob != null && completedJob.status().error() == null) {
+ * // Job completed successfully
+ * } else {
+ * // Handle error case
+ * }
+ * } catch (InterruptedException | TimeoutException e) {
+ * // Handle interrupted wait
+ * }
+ * }</pre>
+ *
* @param format the format of the extracted data
* @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path)
* where the extracted table should be written
@@ -268,6 +407,29 @@ public Job extract(String format, String destinationUri, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns
* the started {@link Job} object.
*
+ * <p>Example of partitioning data to a list of Google Cloud Storage files.
+ *
+ * <pre> {@code
+ * String format = "CSV";
+ * String gcsUrl1 = "gs://my_bucket/PartitionA_*.csv";
+ * String gcsUrl2 = "gs://my_bucket/PartitionB_*.csv";
+ * List<String> destinationUris = new ArrayList<>();
+ * destinationUris.add(gcsUrl1);
+ * destinationUris.add(gcsUrl2);
+ * Job job = table.extract(format, destinationUris);
+ * // Wait for the job to complete
+ * try {
+ * Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ * WaitForOption.timeout(3, TimeUnit.MINUTES));
+ * if (completedJob != null && completedJob.status().error() == null) {
+ * // Job completed successfully
+ * } else {
+ * // Handle error case
+ * }
+ * } catch (InterruptedException | TimeoutException e) {
+ * // Handle interrupted wait
+ * }
+ * }</pre>
+ *
* @param format the format of the exported data
* @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path)
* where the extracted table should be written
@@ -285,6 +447,24 @@ public Job extract(String format, List<String> destinationUris, JobOption... opt
* Starts a BigQuery Job to load data into the current table from the provided source URI. Returns
* the started {@link Job} object.
*
+ * <p>Example loading data from a single Google Cloud Storage file.
+ *
+ * <pre> {@code
+ * String sourceUri = "gs://my_bucket/filename.csv";
+ * Job job = table.load(FormatOptions.csv(), sourceUri);
+ * // Wait for the job to complete
+ * try {
+ * Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ * WaitForOption.timeout(3, TimeUnit.MINUTES));
+ * if (completedJob != null && completedJob.status().error() == null) {
+ * // Job completed successfully
+ * } else {
+ * // Handle error case
+ * }
+ * } catch (InterruptedException | TimeoutException e) {
+ * // Handle interrupted wait
+ * }
+ * }</pre>
+ *
* @param format the format of the data to load
* @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from
* which to load the data
@@ -300,6 +480,28 @@ public Job load(FormatOptions format, String sourceUri, JobOption... options)
* Starts a BigQuery Job to load data into the current table from the provided source URIs.
* Returns the started {@link Job} object.
*
+ * <p>Example loading data from a list of Google Cloud Storage files.
+ *
+ * <pre> {@code
+ * String gcsUrl1 = "gs://my_bucket/filename1.csv";
+ * String gcsUrl2 = "gs://my_bucket/filename2.csv";
+ * List<String> sourceUris = new ArrayList<>();
+ * sourceUris.add(gcsUrl1);
+ * sourceUris.add(gcsUrl2);
+ * Job job = table.load(FormatOptions.csv(), sourceUris);
+ * // Wait for the job to complete
+ * try {
+ * Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ * WaitForOption.timeout(3, TimeUnit.MINUTES));
+ * if (completedJob != null && completedJob.status().error() == null) {
+ * // Job completed successfully
+ * } else {
+ * // Handle error case
+ * }
+ * } catch (InterruptedException | TimeoutException e) {
+ * // Handle interrupted wait
+ * }
+ * }</pre>
+ *
* @param format the format of the exported data
* @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from
* which to load the data
diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java
new file mode 100644
index 000000000000..c169f6badf58
--- /dev/null
+++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * EDITING INSTRUCTIONS
+ * This file is referenced in Table's javadoc. Any change to this file should be reflected in
+ * Table's javadoc.
+ */
+
+package com.google.cloud.examples.bigquery.snippets;
+
+import com.google.cloud.Page;
+import com.google.cloud.WaitForOption;
+import com.google.cloud.bigquery.BigQuery.JobField;
+import com.google.cloud.bigquery.BigQuery.JobOption;
+import com.google.cloud.bigquery.BigQuery.TableDataListOption;
+import com.google.cloud.bigquery.BigQuery.TableField;
+import com.google.cloud.bigquery.BigQuery.TableOption;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FormatOptions;
+import com.google.cloud.bigquery.InsertAllRequest.RowToInsert;
+import com.google.cloud.bigquery.InsertAllResponse;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+
+/**
+ * This class contains a number of snippets for the {@link Table} class.
+ */
+public class TableSnippets {
+
+ private final Table table;
+
+ public TableSnippets(Table table) {
+ this.table = table;
+ }
+
+ /**
+ * Example of checking if the table exists.
+ */
+ // [TARGET exists()]
+ public boolean exists() {
+ // [START exists]
+ boolean exists = table.exists();
+ if (exists) {
+ // the table exists
+ } else {
+ // the table was not found
+ }
+ // [END exists]
+ return exists;
+ }
+
+ /**
+ * Example of fetching the table's latest information, specifying particular table fields to
+ * get.
+ */
+ // [TARGET reload(TableOption...)]
+ // [VARIABLE TableField.LAST_MODIFIED_TIME]
+ // [VARIABLE TableField.NUM_ROWS]
+ public Table reloadTableWithFields(TableField field1, TableField field2) {
+ // [START reloadTableWithFields]
+ Table latestTable = table.reload(TableOption.fields(field1, field2));
+ if (latestTable == null) {
+ // the table was not found
+ }
+ // [END reloadTableWithFields]
+ return latestTable;
+ }
+
+ /**
+ * Example of updating the table's information.
+ */
+ // [TARGET update(TableOption...)]
+ public Table update() {
+ // [START update]
+ Table updatedTable = table.toBuilder().description("new description").build().update();
+ // [END update]
+ return updatedTable;
+ }
+
+ /**
+ * Example of deleting the table.
+ */
+ // [TARGET delete()]
+ public boolean delete() {
+ // [START delete]
+ boolean deleted = table.delete();
+ if (deleted) {
+ // the table was deleted
+ } else {
+ // the table was not found
+ }
+ // [END delete]
+ return deleted;
+ }
+
+ /**
+ * Example of inserting rows into the table.
+ */
+ // [TARGET insert(Iterable)]
+ // [VARIABLE "rowId1"]
+ // [VARIABLE "rowId2"]
+ public InsertAllResponse insert(String rowId1, String rowId2) {
+ // [START insert]
+ List<RowToInsert> rows = new ArrayList<>();
+ Map<String, Object> row1 = new HashMap<>();
+ row1.put("stringField", "value1");
+ row1.put("booleanField", true);
+ Map<String, Object> row2 = new HashMap<>();
+ row2.put("stringField", "value2");
+ row2.put("booleanField", false);
+ rows.add(RowToInsert.of(rowId1, row1));
+ rows.add(RowToInsert.of(rowId2, row2));
+ InsertAllResponse response = table.insert(rows);
+ // do something with response
+ // [END insert]
+ return response;
+ }
+
+ /**
+ * Example of inserting rows into the table, ignoring invalid rows.
+ */
+ // [TARGET insert(Iterable, boolean, boolean)]
+ // [VARIABLE "rowId1"]
+ // [VARIABLE "rowId2"]
+ public InsertAllResponse insertWithParams(String rowId1, String rowId2) {
+ // [START insertWithParams]
+ List<RowToInsert> rows = new ArrayList<>();
+ Map<String, Object> row1 = new HashMap<>();
+ row1.put("stringField", 1);
+ row1.put("booleanField", true);
+ Map<String, Object> row2 = new HashMap<>();
+ row2.put("stringField", "value2");
+ row2.put("booleanField", false);
+ rows.add(RowToInsert.of(rowId1, row1));
+ rows.add(RowToInsert.of(rowId2, row2));
+ InsertAllResponse response = table.insert(rows, true, true);
+ // do something with response
+ // [END insertWithParams]
+ return response;
+ }
+
+ /**
+ * Example of listing rows in the table.
+ */
+ // [TARGET list(TableDataListOption...)]
+ public Page<List<FieldValue>> list() {
+ // [START list]
+ Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
+ Iterator<List<FieldValue>> rowIterator = page.iterateAll();
+ while (rowIterator.hasNext()) {
+ List<FieldValue> row = rowIterator.next();
+ // do something with the row
+ }
+ // [END list]
+ return page;
+ }
+
+ /**
+ * Example of copying the table to a destination table.
+ */
+ // [TARGET copy(String, String, JobOption...)]
+ // [VARIABLE "my_dataset"]
+ // [VARIABLE "my_destination_table"]
+ public Job copy(String datasetName, String tableName) {
+ // [START copy]
+ Job job = table.copy(datasetName, tableName);
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(3, TimeUnit.MINUTES));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully
+ } else {
+ // Handle error case
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait
+ }
+ // [END copy]
+ return job;
+ }
+
+ /**
+ * Example copying the table to a destination table.
+ */
+ // [TARGET copy(TableId, JobOption...)]
+ // [VARIABLE "my_dataset"]
+ // [VARIABLE "my_destination_table"]
+ public Job copyTableId(String dataset, String tableName) throws BigQueryException {
+ // [START copyTableId]
+ TableId destinationId = TableId.of(dataset, tableName);
+ JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
+ Job job = table.copy(destinationId, options);
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(3, TimeUnit.MINUTES));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait
+ }
+ // [END copyTableId]
+ return job;
+ }
+
+ /**
+ * Example of partitioning data to a list of Google Cloud Storage files.
+ */
+ // [TARGET extract(String, List, JobOption...)]
+ // [VARIABLE "CSV"]
+ // [VARIABLE "gs://my_bucket/PartitionA_*.csv"]
+ // [VARIABLE "gs://my_bucket/PartitionB_*.csv"]
+ public Job extractList(String format, String gcsUrl1, String gcsUrl2) {
+ // [START extractList]
+ List<String> destinationUris = new ArrayList<>();
+ destinationUris.add(gcsUrl1);
+ destinationUris.add(gcsUrl2);
+ Job job = table.extract(format, destinationUris);
+ // Wait for the job to complete
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(3, TimeUnit.MINUTES));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully
+ } else {
+ // Handle error case
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait
+ }
+ // [END extractList]
+ return job;
+ }
+
+ /**
+ * Example extracting data to single Google Cloud Storage file.
+ */
+ // [TARGET extract(String, String, JobOption...)]
+ // [VARIABLE "CSV"]
+ // [VARIABLE "gs://my_bucket/filename.csv"]
+ public Job extractSingle(String format, String gcsUrl) {
+ // [START extractSingle]
+ Job job = table.extract(format, gcsUrl);
+ // Wait for the job to complete
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(3, TimeUnit.MINUTES));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully
+ } else {
+ // Handle error case
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait
+ }
+ // [END extractSingle]
+ return job;
+ }
+
+ /**
+ * Example loading data from a list of Google Cloud Storage files.
+ */
+ // [TARGET load(FormatOptions, List, JobOption...)]
+ // [VARIABLE "gs://my_bucket/filename1.csv"]
+ // [VARIABLE "gs://my_bucket/filename2.csv"]
+ public Job loadList(String gcsUrl1, String gcsUrl2) {
+ // [START loadList]
+ List<String> sourceUris = new ArrayList<>();
+ sourceUris.add(gcsUrl1);
+ sourceUris.add(gcsUrl2);
+ Job job = table.load(FormatOptions.csv(), sourceUris);
+ // Wait for the job to complete
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(3, TimeUnit.MINUTES));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully
+ } else {
+ // Handle error case
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait
+ }
+ // [END loadList]
+ return job;
+ }
+
+ /**
+ * Example loading data from a single Google Cloud Storage file.
+ */
+ // [TARGET load(FormatOptions, String, JobOption...)]
+ // [VARIABLE "gs://my_bucket/filename.csv"]
+ public Job loadSingle(String sourceUri) {
+ // [START loadSingle]
+ Job job = table.load(FormatOptions.csv(), sourceUri);
+ // Wait for the job to complete
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(3, TimeUnit.MINUTES));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully
+ } else {
+ // Handle error case
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait
+ }
+ // [END loadSingle]
+ return job;
+ }
+}
diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java
new file mode 100644
index 000000000000..6687c653d014
--- /dev/null
+++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java
@@ -0,0 +1,277 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.examples.bigquery.snippets;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import com.google.cloud.Page;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.TableDataListOption;
+import com.google.cloud.bigquery.BigQuery.TableField;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Field.Type;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.InsertAllResponse;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
+import com.google.cloud.storage.BucketInfo;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.testing.RemoteStorageHelper;
+import com.google.common.base.Function;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Integration tests for {@link TableSnippets}.
+ */
+public class ITTableSnippets {
+
+ private static final String BASE_TABLE_NAME = "my_table";
+ private static final String DATASET_NAME = RemoteBigQueryHelper.generateDatasetName();
+ private static final String COPY_DATASET_NAME = RemoteBigQueryHelper.generateDatasetName();
+ private static final String BUCKET_NAME = RemoteStorageHelper.generateBucketName();
+ private static final Schema SCHEMA =
+ Schema.of(Field.of("stringField", Type.string()), Field.of("booleanField", Type.bool()));
+ private static final List<?> ROW1 = ImmutableList.of("value1", true);
+ private static final List<?> ROW2 = ImmutableList.of("value2", false);
+ private static final String DOOMED_TABLE_NAME = "doomed_table";
+ private static final TableId DOOMED_TABLE_ID = TableId.of(DATASET_NAME, DOOMED_TABLE_NAME);
+
+ private static BigQuery bigquery;
+ private static Storage storage;
+ private static int nextTableNumber;
+
+ private Table table;
+ private TableSnippets tableSnippets;
+
+ @Rule
+ public Timeout globalTimeout = Timeout.seconds(300);
+
+ @BeforeClass
+ public static void beforeClass() {
+ bigquery = RemoteBigQueryHelper.create().options().service();
+ bigquery.create(DatasetInfo.builder(DATASET_NAME).build());
+ bigquery.create(DatasetInfo.builder(COPY_DATASET_NAME).build());
+ storage = RemoteStorageHelper.create().options().service();
+ storage.create(BucketInfo.of(BUCKET_NAME));
+ }
+
+ @Before
+ public void before() {
+ ++nextTableNumber;
+ StandardTableDefinition.Builder builder = StandardTableDefinition.builder();
+ builder.schema(SCHEMA);
+ table = bigquery.create(TableInfo.of(getTableId(), builder.build()));
+ bigquery.create(TableInfo.of(getCopyTableId(), builder.build()));
+ tableSnippets = new TableSnippets(table);
+ }
+
+ @After
+ public void after() {
+ bigquery.delete(getTableId());
+ bigquery.delete(getCopyTableId());
+ }
+
+ @AfterClass
+ public static void afterClass() {
+ RemoteBigQueryHelper.forceDelete(bigquery, DATASET_NAME);
+ RemoteBigQueryHelper.forceDelete(bigquery, COPY_DATASET_NAME);
+ RemoteStorageHelper.forceDelete(storage, BUCKET_NAME);
+ }
+
+ private String getTableName() {
+ return BASE_TABLE_NAME + nextTableNumber;
+ }
+
+ private TableId getTableId() {
+ return TableId.of(DATASET_NAME, getTableName());
+ }
+
+ private String getCopyTableName() {
+ return BASE_TABLE_NAME + "_copy_" + nextTableNumber;
+ }
+
+ private TableId getCopyTableId() {
+ return TableId.of(COPY_DATASET_NAME, getCopyTableName());
+ }
+
+ @Test
+ public void testExists() {
+ assertTrue(tableSnippets.exists());
+ }
+
+ @Test
+ public void testReloadTableWithFields() {
+ Table latestTable =
+ tableSnippets.reloadTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS);
+ assertNotNull(latestTable);
+ assertNotNull(latestTable.lastModifiedTime());
+ }
+
+ @Test
+ public void testUpdate() {
+ Table updatedTable = tableSnippets.update();
+ assertEquals("new description", updatedTable.description());
+ }
+
+ @Test
+ public void testDelete() {
+ Table doomedTable =
+ bigquery.create(TableInfo.of(DOOMED_TABLE_ID, StandardTableDefinition.of(SCHEMA)));
+ TableSnippets doomedTableSnippets = new TableSnippets(doomedTable);
+ assertTrue(doomedTableSnippets.delete());
+ }
+
+ @Test
+ public void testInsert() throws InterruptedException {
+ InsertAllResponse response = tableSnippets.insert("row1", "row2");
+ assertFalse(response.hasErrors());
+ verifyTestRows(table);
+ }
+
+ @Test
+ public void testInsertParams() throws InterruptedException {
+ InsertAllResponse response = tableSnippets.insertWithParams("row1", "row2");
+ assertTrue(response.hasErrors());
+ List<List<FieldValue>> rows = ImmutableList.copyOf(tableSnippets.list().values());
+ while (rows.isEmpty()) {
+ Thread.sleep(500);
+ rows = ImmutableList.copyOf(tableSnippets.list().values());
+ }
+ Set<List<?>> values =
+ FluentIterable.from(rows).transform(new Function<List<FieldValue>, List<?>>() {
+ @Override
+ public List<?> apply(List<FieldValue> row) {
+ return ImmutableList.of(row.get(0).stringValue(), row.get(1).booleanValue());
+ }
+ }).toSet();
+ assertEquals(ImmutableSet.of(ROW2), values);
+ }
+
+ @Test
+ public void testList() throws InterruptedException {
+ List<List<FieldValue>> rows = ImmutableList.copyOf(tableSnippets.list().values());
+ assertEquals(0, rows.size());
+
+ InsertAllResponse response = tableSnippets.insert("row1", "row2");
+ assertFalse(response.hasErrors());
+ rows = ImmutableList.copyOf(tableSnippets.list().values());
+ while (rows.isEmpty()) {
+ Thread.sleep(500);
+ rows = ImmutableList.copyOf(tableSnippets.list().values());
+ }
+ assertEquals(2, rows.size());
+ }
+
+ @Test
+ public void testCopy() {
+ tableSnippets.copy(COPY_DATASET_NAME, BASE_TABLE_NAME);
+ }
+
+ @Test
+ public void testCopyTableId() {
+ Job copyJob = tableSnippets.copyTableId(COPY_DATASET_NAME, getCopyTableName());
+ assertSuccessful(copyJob);
+ }
+
+ @Test
+ public void testExtractAndLoadList() {
+ String gcsFile1 = "gs://" + BUCKET_NAME + "/extractTestA_*.csv";
+ String gcsFile2 = "gs://" + BUCKET_NAME + "/extractTestB_*.csv";
+ Job extractJob = tableSnippets.extractList("CSV", gcsFile1, gcsFile2);
+ gcsFile1 = gcsFile1.replace("*", "000000000000");
+ gcsFile2 = gcsFile2.replace("*", "000000000000");
+ assertSuccessful(extractJob);
+ Job loadJob = tableSnippets.loadList(gcsFile1, gcsFile2);
+ assertSuccessful(loadJob);
+ }
+
+ @Test
+ public void testExtractAndLoadSingle() {
+ String gcsFile = "gs://" + BUCKET_NAME + "/extractTest.csv";
+ Job extractJob = tableSnippets.extractSingle("CSV", gcsFile);
+ assertSuccessful(extractJob);
+ Job loadJob = tableSnippets.loadSingle(gcsFile);
+ assertSuccessful(loadJob);
+ }
+
+ /**
+ * Verifies that the given table has the rows inserted by InsertTestRows().
+ *
+ * @param checkTable the table to query
+ */
+ private void verifyTestRows(Table checkTable) throws InterruptedException {
+ List<List<FieldValue>> rows = waitForTableRows(checkTable, 2);
+ // Verify that the table data matches what it's supposed to.
+ Set<List<?>> values =
+ FluentIterable.from(rows).transform(new Function<List<FieldValue>, List<?>>() {
+ @Override
+ public List<?> apply(List<FieldValue> row) {
+ return ImmutableList.of(row.get(0).stringValue(), row.get(1).booleanValue());
+ }
+ }).toSet();
+ assertEquals(ImmutableSet.of(ROW2, ROW1), values);
+ }
+
+ /**
+ * Waits for a specified number of rows to appear in the given table. This is used by
+ * verifyTestRows to wait for data to appear before verifying.
+ *
+ * @param checkTable the table to query
+ * @param numRows the expected number of rows
+ * @return the rows from the table
+ */
+ private List<List<FieldValue>> waitForTableRows(Table checkTable, int numRows)
+ throws InterruptedException {
+ // Wait for the data to appear.
+ Page<List<FieldValue>> page = checkTable.list(TableDataListOption.pageSize(100));
+ List<List<FieldValue>> rows = ImmutableList.copyOf(page.values());
+ while (rows.size() != numRows) {
+ Thread.sleep(1000);
+ page = checkTable.list(TableDataListOption.pageSize(100));
+ rows = ImmutableList.copyOf(page.values());
+ }
+ return rows;
+ }
+
+ private void assertSuccessful(Job job) {
+ assertTrue(job.isDone());
+ assertNull(job.status().error());
+ }
+}