From 8cf3e20daecabdb1613c94b2d3bbff89d72f411e Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Wed, 28 Nov 2018 00:35:51 -0800 Subject: [PATCH] Regenerate dataproc client --- .../dataproc/v1/ClusterControllerGrpc.java | 10 +- .../v1/WorkflowTemplateServiceGrpc.java | 998 ++++++ .../cloud/dataproc/v1/AcceleratorConfig.java | 74 +- .../v1/AcceleratorConfigOrBuilder.java | 20 +- .../com/google/cloud/dataproc/v1/Cluster.java | 88 +- .../cloud/dataproc/v1/ClusterConfig.java | 339 +- .../dataproc/v1/ClusterConfigOrBuilder.java | 53 +- .../cloud/dataproc/v1/ClusterOperation.java | 807 +++++ .../dataproc/v1/ClusterOperationMetadata.java | 174 +- .../v1/ClusterOperationMetadataOrBuilder.java | 50 +- .../v1/ClusterOperationOrBuilder.java | 54 + .../dataproc/v1/ClusterOperationStatus.java | 66 +- .../v1/ClusterOperationStatusOrBuilder.java | 18 +- .../cloud/dataproc/v1/ClusterOrBuilder.java | 20 +- .../cloud/dataproc/v1/ClusterSelector.java | 931 +++++ .../dataproc/v1/ClusterSelectorOrBuilder.java | 92 + .../cloud/dataproc/v1/ClusterStatus.java | 78 +- .../dataproc/v1/ClusterStatusOrBuilder.java | 18 +- .../cloud/dataproc/v1/ClustersProto.java | 308 +- .../dataproc/v1/CreateClusterRequest.java | 211 ++ .../v1/CreateClusterRequestOrBuilder.java | 34 + .../v1/CreateWorkflowTemplateRequest.java | 820 +++++ ...reateWorkflowTemplateRequestOrBuilder.java | 56 + .../dataproc/v1/DeleteClusterRequest.java | 373 ++ .../v1/DeleteClusterRequestOrBuilder.java | 54 + .../v1/DeleteWorkflowTemplateRequest.java | 675 ++++ ...eleteWorkflowTemplateRequestOrBuilder.java | 42 + .../dataproc/v1/DiagnoseClusterResults.java | 14 +- .../v1/DiagnoseClusterResultsOrBuilder.java | 4 +- .../google/cloud/dataproc/v1/DiskConfig.java | 169 + .../dataproc/v1/DiskConfigOrBuilder.java | 22 + .../cloud/dataproc/v1/EncryptionConfig.java | 584 ++++ .../v1/EncryptionConfigOrBuilder.java | 29 + .../cloud/dataproc/v1/GceClusterConfig.java | 148 +- .../v1/GceClusterConfigOrBuilder.java | 46 +- 
.../v1/GetWorkflowTemplateRequest.java | 673 ++++ .../GetWorkflowTemplateRequestOrBuilder.java | 42 + .../dataproc/v1/InstanceGroupConfig.java | 216 +- .../v1/InstanceGroupConfigOrBuilder.java | 58 +- ...tantiateInlineWorkflowTemplateRequest.java | 1017 ++++++ ...nlineWorkflowTemplateRequestOrBuilder.java | 86 + .../InstantiateWorkflowTemplateRequest.java | 1211 +++++++ ...tiateWorkflowTemplateRequestOrBuilder.java | 133 + .../com/google/cloud/dataproc/v1/Job.java | 313 +- .../cloud/dataproc/v1/JobOrBuilder.java | 56 +- .../cloud/dataproc/v1/JobPlacement.java | 14 +- .../dataproc/v1/JobPlacementOrBuilder.java | 4 +- .../cloud/dataproc/v1/JobScheduling.java | 4 - .../google/cloud/dataproc/v1/JobStatus.java | 78 +- .../cloud/dataproc/v1/JobStatusOrBuilder.java | 18 +- .../google/cloud/dataproc/v1/JobsProto.java | 109 +- .../dataproc/v1/ListClustersResponse.java | 60 +- .../v1/ListClustersResponseOrBuilder.java | 14 +- .../cloud/dataproc/v1/ListJobsResponse.java | 46 +- .../v1/ListJobsResponseOrBuilder.java | 10 +- .../v1/ListWorkflowTemplatesRequest.java | 827 +++++ ...ListWorkflowTemplatesRequestOrBuilder.java | 60 + .../v1/ListWorkflowTemplatesResponse.java | 1031 ++++++ ...istWorkflowTemplatesResponseOrBuilder.java | 75 + .../cloud/dataproc/v1/ManagedCluster.java | 1222 +++++++ .../dataproc/v1/ManagedClusterOrBuilder.java | 144 + .../cloud/dataproc/v1/ManagedGroupConfig.java | 28 +- .../v1/ManagedGroupConfigOrBuilder.java | 8 +- .../dataproc/v1/NodeInitializationAction.java | 14 +- .../v1/NodeInitializationActionOrBuilder.java | 4 +- .../cloud/dataproc/v1/OperationsProto.java | 48 +- .../google/cloud/dataproc/v1/OrderedJob.java | 3065 +++++++++++++++++ .../dataproc/v1/OrderedJobOrBuilder.java | 343 ++ .../dataproc/v1/ParameterValidation.java | 994 ++++++ .../v1/ParameterValidationOrBuilder.java | 61 + .../cloud/dataproc/v1/RegexValidation.java | 672 ++++ .../dataproc/v1/RegexValidationOrBuilder.java | 52 + .../google/cloud/dataproc/v1/RegionName.java | 189 + 
.../cloud/dataproc/v1/SoftwareConfig.java | 56 +- .../dataproc/v1/SoftwareConfigOrBuilder.java | 16 +- .../cloud/dataproc/v1/SubmitJobRequest.java | 211 ++ .../v1/SubmitJobRequestOrBuilder.java | 34 + .../cloud/dataproc/v1/TemplateParameter.java | 1708 +++++++++ .../v1/TemplateParameterOrBuilder.java | 259 ++ .../dataproc/v1/UpdateClusterRequest.java | 512 +++ .../v1/UpdateClusterRequestOrBuilder.java | 77 + .../v1/UpdateWorkflowTemplateRequest.java | 663 ++++ ...pdateWorkflowTemplateRequestOrBuilder.java | 37 + .../cloud/dataproc/v1/ValueValidation.java | 646 ++++ .../dataproc/v1/ValueValidationOrBuilder.java | 44 + .../cloud/dataproc/v1/WorkflowGraph.java | 859 +++++ .../dataproc/v1/WorkflowGraphOrBuilder.java | 53 + .../cloud/dataproc/v1/WorkflowMetadata.java | 2697 +++++++++++++++ .../v1/WorkflowMetadataOrBuilder.java | 269 ++ .../cloud/dataproc/v1/WorkflowNode.java | 1415 ++++++++ .../dataproc/v1/WorkflowNodeOrBuilder.java | 115 + .../cloud/dataproc/v1/WorkflowTemplate.java | 2882 ++++++++++++++++ .../dataproc/v1/WorkflowTemplateName.java | 212 ++ .../v1/WorkflowTemplateOrBuilder.java | 338 ++ .../v1/WorkflowTemplatePlacement.java | 1020 ++++++ .../WorkflowTemplatePlacementOrBuilder.java | 67 + .../dataproc/v1/WorkflowTemplatesProto.java | 514 +++ .../google/cloud/dataproc/v1/clusters.proto | 204 +- .../proto/google/cloud/dataproc/v1/jobs.proto | 75 +- .../google/cloud/dataproc/v1/operations.proto | 29 +- .../dataproc/v1/workflow_templates.proto | 662 ++++ .../cloud/dataproc/v1beta2/Cluster.java | 24 +- .../cloud/dataproc/v1beta2/ClusterConfig.java | 229 ++ .../v1beta2/ClusterConfigOrBuilder.java | 25 + .../dataproc/v1beta2/ClusterOrBuilder.java | 6 +- .../cloud/dataproc/v1beta2/ClustersProto.java | 272 +- .../dataproc/v1beta2/EncryptionConfig.java | 584 ++++ .../v1beta2/EncryptionConfigOrBuilder.java | 29 + .../dataproc/v1beta2/InstanceGroupConfig.java | 35 +- .../v1beta2/InstanceGroupConfigOrBuilder.java | 10 +- ...tantiateInlineWorkflowTemplateRequest.java 
| 205 +- ...nlineWorkflowTemplateRequestOrBuilder.java | 26 +- .../InstantiateWorkflowTemplateRequest.java | 538 ++- ...tiateWorkflowTemplateRequestOrBuilder.java | 85 +- .../google/cloud/dataproc/v1beta2/Job.java | 335 +- .../cloud/dataproc/v1beta2/JobOrBuilder.java | 42 + .../cloud/dataproc/v1beta2/JobsProto.java | 17 +- .../dataproc/v1beta2/LifecycleConfig.java | 148 +- .../v1beta2/LifecycleConfigOrBuilder.java | 36 +- .../dataproc/v1beta2/ParameterValidation.java | 994 ++++++ .../v1beta2/ParameterValidationOrBuilder.java | 61 + .../dataproc/v1beta2/RegexValidation.java | 672 ++++ .../v1beta2/RegexValidationOrBuilder.java | 52 + .../dataproc/v1beta2/TemplateParameter.java | 1695 +++++++++ .../v1beta2/TemplateParameterOrBuilder.java | 255 ++ .../dataproc/v1beta2/ValueValidation.java | 646 ++++ .../v1beta2/ValueValidationOrBuilder.java | 44 + .../dataproc/v1beta2/WorkflowMetadata.java | 627 +++- .../v1beta2/WorkflowMetadataOrBuilder.java | 72 +- .../dataproc/v1beta2/WorkflowTemplate.java | 481 +++ .../v1beta2/WorkflowTemplateOrBuilder.java | 54 + .../v1beta2/WorkflowTemplatesProto.java | 370 +- .../cloud/dataproc/v1beta2/clusters.proto | 36 +- .../google/cloud/dataproc/v1beta2/jobs.proto | 12 +- .../cloud/dataproc/v1beta2/operations.proto | 3 +- .../cloud/dataproc/v1beta2/shared.proto | 3 +- .../dataproc/v1beta2/workflow_templates.proto | 135 +- .../dataproc/v1/ClusterControllerClient.java | 111 +- .../dataproc/v1/JobControllerClient.java | 40 + .../v1/WorkflowTemplateServiceClient.java | 1086 ++++++ .../v1/WorkflowTemplateServiceSettings.java | 301 ++ .../cloud/dataproc/v1/package-info.java | 19 +- .../stub/ClusterControllerStubSettings.java | 16 +- ...orkflowTemplateServiceCallableFactory.java | 116 + .../stub/GrpcWorkflowTemplateServiceStub.java | 372 ++ .../v1/stub/JobControllerStubSettings.java | 14 +- .../v1/stub/WorkflowTemplateServiceStub.java | 111 + .../WorkflowTemplateServiceStubSettings.java | 639 ++++ .../v1beta2/ClusterControllerClient.java | 111 + 
.../dataproc/v1beta2/JobControllerClient.java | 40 + .../WorkflowTemplateServiceClient.java | 413 ++- .../WorkflowTemplateServiceSettings.java | 31 + .../cloud/dataproc/v1beta2/package-info.java | 4 +- .../stub/ClusterControllerStubSettings.java | 16 +- .../stub/GrpcWorkflowTemplateServiceStub.java | 43 + .../stub/JobControllerStubSettings.java | 12 +- .../stub/WorkflowTemplateServiceStub.java | 14 + .../WorkflowTemplateServiceStubSettings.java | 94 +- .../v1/ClusterControllerClientTest.java | 127 +- .../dataproc/v1/JobControllerClientTest.java | 62 +- .../v1/MockWorkflowTemplateService.java | 57 + .../v1/MockWorkflowTemplateServiceImpl.java | 167 + .../v1/WorkflowTemplateServiceClientTest.java | 481 +++ .../v1beta2/ClusterControllerClientTest.java | 122 + .../v1beta2/JobControllerClientTest.java | 63 + .../MockWorkflowTemplateServiceImpl.java | 16 + .../WorkflowTemplateServiceClientTest.java | 166 +- 167 files changed, 48000 insertions(+), 1874 deletions(-) create mode 100644 google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceGrpc.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelector.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelectorOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequestOrBuilder.java create mode 100644 
google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequestOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfig.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfigOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequestOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequestOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequestOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequestOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponse.java create mode 100644 
google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponseOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedCluster.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedClusterOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJob.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJobOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidationOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidationOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameter.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameterOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequest.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequestOrBuilder.java create mode 100644 
google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidationOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraph.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraphOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadata.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadataOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNode.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNodeOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplate.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacement.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacementOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatesProto.java create mode 100644 
google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/workflow_templates.proto create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfig.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfigOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidationOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidationOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidation.java create mode 100644 google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidationOrBuilder.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java create mode 100644 
google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStub.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java create mode 100644 google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java diff --git a/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerGrpc.java b/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerGrpc.java index 3dca90f85648..e80a3f30db5f 100644 --- a/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerGrpc.java +++ b/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerGrpc.java @@ -18,7 +18,7 @@ /** *
  * The ClusterControllerService provides methods to manage clusters
- * of Google Compute Engine instances.
+ * of Compute Engine instances.
  * 
*/ @javax.annotation.Generated( @@ -280,7 +280,7 @@ public static ClusterControllerFutureStub newFutureStub( /** *
    * The ClusterControllerService provides methods to manage clusters
-   * of Google Compute Engine instances.
+   * of Compute Engine instances.
    * 
*/ public static abstract class ClusterControllerImplBase implements io.grpc.BindableService { @@ -398,7 +398,7 @@ public void diagnoseCluster(com.google.cloud.dataproc.v1.DiagnoseClusterRequest /** *
    * The ClusterControllerService provides methods to manage clusters
-   * of Google Compute Engine instances.
+   * of Compute Engine instances.
    * 
*/ public static final class ClusterControllerStub extends io.grpc.stub.AbstractStub { @@ -489,7 +489,7 @@ public void diagnoseCluster(com.google.cloud.dataproc.v1.DiagnoseClusterRequest /** *
    * The ClusterControllerService provides methods to manage clusters
-   * of Google Compute Engine instances.
+   * of Compute Engine instances.
    * 
*/ public static final class ClusterControllerBlockingStub extends io.grpc.stub.AbstractStub { @@ -574,7 +574,7 @@ public com.google.longrunning.Operation diagnoseCluster(com.google.cloud.datapro /** *
    * The ClusterControllerService provides methods to manage clusters
-   * of Google Compute Engine instances.
+   * of Compute Engine instances.
    * 
*/ public static final class ClusterControllerFutureStub extends io.grpc.stub.AbstractStub { diff --git a/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceGrpc.java b/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceGrpc.java new file mode 100644 index 000000000000..18768b3171e2 --- /dev/null +++ b/google-api-grpc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceGrpc.java @@ -0,0 +1,998 @@ +package com.google.cloud.dataproc.v1; + +import static io.grpc.MethodDescriptor.generateFullMethodName; +import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; +import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; +import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; +import static io.grpc.stub.ClientCalls.asyncUnaryCall; +import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; +import static io.grpc.stub.ClientCalls.blockingUnaryCall; +import static io.grpc.stub.ClientCalls.futureUnaryCall; +import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall; +import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; +import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; +import static io.grpc.stub.ServerCalls.asyncUnaryCall; +import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall; +import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall; + +/** + *
+ * The API interface for managing Workflow Templates in the
+ * Cloud Dataproc API.
+ * 
+ */ +@javax.annotation.Generated( + value = "by gRPC proto compiler (version 1.10.0)", + comments = "Source: google/cloud/dataproc/v1/workflow_templates.proto") +public final class WorkflowTemplateServiceGrpc { + + private WorkflowTemplateServiceGrpc() {} + + public static final String SERVICE_NAME = "google.cloud.dataproc.v1.WorkflowTemplateService"; + + // Static method descriptors that strictly reflect the proto. + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getCreateWorkflowTemplateMethod()} instead. + public static final io.grpc.MethodDescriptor METHOD_CREATE_WORKFLOW_TEMPLATE = getCreateWorkflowTemplateMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getCreateWorkflowTemplateMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getCreateWorkflowTemplateMethod() { + return getCreateWorkflowTemplateMethodHelper(); + } + + private static io.grpc.MethodDescriptor getCreateWorkflowTemplateMethodHelper() { + io.grpc.MethodDescriptor getCreateWorkflowTemplateMethod; + if ((getCreateWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getCreateWorkflowTemplateMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getCreateWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getCreateWorkflowTemplateMethod) == null) { + WorkflowTemplateServiceGrpc.getCreateWorkflowTemplateMethod = getCreateWorkflowTemplateMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "CreateWorkflowTemplate")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + 
com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("CreateWorkflowTemplate")) + .build(); + } + } + } + return getCreateWorkflowTemplateMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getGetWorkflowTemplateMethod()} instead. + public static final io.grpc.MethodDescriptor METHOD_GET_WORKFLOW_TEMPLATE = getGetWorkflowTemplateMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getGetWorkflowTemplateMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getGetWorkflowTemplateMethod() { + return getGetWorkflowTemplateMethodHelper(); + } + + private static io.grpc.MethodDescriptor getGetWorkflowTemplateMethodHelper() { + io.grpc.MethodDescriptor getGetWorkflowTemplateMethod; + if ((getGetWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getGetWorkflowTemplateMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getGetWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getGetWorkflowTemplateMethod) == null) { + WorkflowTemplateServiceGrpc.getGetWorkflowTemplateMethod = getGetWorkflowTemplateMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "GetWorkflowTemplate")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("GetWorkflowTemplate")) + .build(); + } + } + } + return getGetWorkflowTemplateMethod; + } + 
@io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getInstantiateWorkflowTemplateMethod()} instead. + public static final io.grpc.MethodDescriptor METHOD_INSTANTIATE_WORKFLOW_TEMPLATE = getInstantiateWorkflowTemplateMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getInstantiateWorkflowTemplateMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getInstantiateWorkflowTemplateMethod() { + return getInstantiateWorkflowTemplateMethodHelper(); + } + + private static io.grpc.MethodDescriptor getInstantiateWorkflowTemplateMethodHelper() { + io.grpc.MethodDescriptor getInstantiateWorkflowTemplateMethod; + if ((getInstantiateWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getInstantiateWorkflowTemplateMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getInstantiateWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getInstantiateWorkflowTemplateMethod) == null) { + WorkflowTemplateServiceGrpc.getInstantiateWorkflowTemplateMethod = getInstantiateWorkflowTemplateMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "InstantiateWorkflowTemplate")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.longrunning.Operation.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("InstantiateWorkflowTemplate")) + .build(); + } + } + } + return getInstantiateWorkflowTemplateMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link 
#getInstantiateInlineWorkflowTemplateMethod()} instead. + public static final io.grpc.MethodDescriptor METHOD_INSTANTIATE_INLINE_WORKFLOW_TEMPLATE = getInstantiateInlineWorkflowTemplateMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getInstantiateInlineWorkflowTemplateMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getInstantiateInlineWorkflowTemplateMethod() { + return getInstantiateInlineWorkflowTemplateMethodHelper(); + } + + private static io.grpc.MethodDescriptor getInstantiateInlineWorkflowTemplateMethodHelper() { + io.grpc.MethodDescriptor getInstantiateInlineWorkflowTemplateMethod; + if ((getInstantiateInlineWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getInstantiateInlineWorkflowTemplateMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getInstantiateInlineWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getInstantiateInlineWorkflowTemplateMethod) == null) { + WorkflowTemplateServiceGrpc.getInstantiateInlineWorkflowTemplateMethod = getInstantiateInlineWorkflowTemplateMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "InstantiateInlineWorkflowTemplate")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.longrunning.Operation.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("InstantiateInlineWorkflowTemplate")) + .build(); + } + } + } + return getInstantiateInlineWorkflowTemplateMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link 
#getUpdateWorkflowTemplateMethod()} instead. + public static final io.grpc.MethodDescriptor METHOD_UPDATE_WORKFLOW_TEMPLATE = getUpdateWorkflowTemplateMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getUpdateWorkflowTemplateMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getUpdateWorkflowTemplateMethod() { + return getUpdateWorkflowTemplateMethodHelper(); + } + + private static io.grpc.MethodDescriptor getUpdateWorkflowTemplateMethodHelper() { + io.grpc.MethodDescriptor getUpdateWorkflowTemplateMethod; + if ((getUpdateWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getUpdateWorkflowTemplateMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getUpdateWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getUpdateWorkflowTemplateMethod) == null) { + WorkflowTemplateServiceGrpc.getUpdateWorkflowTemplateMethod = getUpdateWorkflowTemplateMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "UpdateWorkflowTemplate")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("UpdateWorkflowTemplate")) + .build(); + } + } + } + return getUpdateWorkflowTemplateMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getListWorkflowTemplatesMethod()} instead. 
+ public static final io.grpc.MethodDescriptor METHOD_LIST_WORKFLOW_TEMPLATES = getListWorkflowTemplatesMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getListWorkflowTemplatesMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getListWorkflowTemplatesMethod() { + return getListWorkflowTemplatesMethodHelper(); + } + + private static io.grpc.MethodDescriptor getListWorkflowTemplatesMethodHelper() { + io.grpc.MethodDescriptor getListWorkflowTemplatesMethod; + if ((getListWorkflowTemplatesMethod = WorkflowTemplateServiceGrpc.getListWorkflowTemplatesMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getListWorkflowTemplatesMethod = WorkflowTemplateServiceGrpc.getListWorkflowTemplatesMethod) == null) { + WorkflowTemplateServiceGrpc.getListWorkflowTemplatesMethod = getListWorkflowTemplatesMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "ListWorkflowTemplates")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("ListWorkflowTemplates")) + .build(); + } + } + } + return getListWorkflowTemplatesMethod; + } + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + @java.lang.Deprecated // Use {@link #getDeleteWorkflowTemplateMethod()} instead. 
+ public static final io.grpc.MethodDescriptor METHOD_DELETE_WORKFLOW_TEMPLATE = getDeleteWorkflowTemplateMethodHelper(); + + private static volatile io.grpc.MethodDescriptor getDeleteWorkflowTemplateMethod; + + @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") + public static io.grpc.MethodDescriptor getDeleteWorkflowTemplateMethod() { + return getDeleteWorkflowTemplateMethodHelper(); + } + + private static io.grpc.MethodDescriptor getDeleteWorkflowTemplateMethodHelper() { + io.grpc.MethodDescriptor getDeleteWorkflowTemplateMethod; + if ((getDeleteWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getDeleteWorkflowTemplateMethod) == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + if ((getDeleteWorkflowTemplateMethod = WorkflowTemplateServiceGrpc.getDeleteWorkflowTemplateMethod) == null) { + WorkflowTemplateServiceGrpc.getDeleteWorkflowTemplateMethod = getDeleteWorkflowTemplateMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService", "DeleteWorkflowTemplate")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.protobuf.Empty.getDefaultInstance())) + .setSchemaDescriptor(new WorkflowTemplateServiceMethodDescriptorSupplier("DeleteWorkflowTemplate")) + .build(); + } + } + } + return getDeleteWorkflowTemplateMethod; + } + + /** + * Creates a new async stub that supports all call types for the service + */ + public static WorkflowTemplateServiceStub newStub(io.grpc.Channel channel) { + return new WorkflowTemplateServiceStub(channel); + } + + /** + * Creates a new blocking-style stub that supports unary and streaming output calls on the service + */ + public static 
WorkflowTemplateServiceBlockingStub newBlockingStub( + io.grpc.Channel channel) { + return new WorkflowTemplateServiceBlockingStub(channel); + } + + /** + * Creates a new ListenableFuture-style stub that supports unary calls on the service + */ + public static WorkflowTemplateServiceFutureStub newFutureStub( + io.grpc.Channel channel) { + return new WorkflowTemplateServiceFutureStub(channel); + } + + /** + *
+   * The API interface for managing Workflow Templates in the
+   * Cloud Dataproc API.
+   * 
+ */ + public static abstract class WorkflowTemplateServiceImplBase implements io.grpc.BindableService { + + /** + *
+     * Creates new workflow template.
+     * 
+ */ + public void createWorkflowTemplate(com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getCreateWorkflowTemplateMethodHelper(), responseObserver); + } + + /** + *
+     * Retrieves the latest workflow template.
+     * Can retrieve previously instantiated template by specifying optional
+     * version parameter.
+     * 
+ */ + public void getWorkflowTemplate(com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getGetWorkflowTemplateMethodHelper(), responseObserver); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public void instantiateWorkflowTemplate(com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getInstantiateWorkflowTemplateMethodHelper(), responseObserver); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * This method is equivalent to executing the sequence
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public void instantiateInlineWorkflowTemplate(com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getInstantiateInlineWorkflowTemplateMethodHelper(), responseObserver); + } + + /** + *
+     * Updates (replaces) workflow template. The updated template
+     * must contain version that matches the current server version.
+     * 
+ */ + public void updateWorkflowTemplate(com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getUpdateWorkflowTemplateMethodHelper(), responseObserver); + } + + /** + *
+     * Lists workflows that match the specified filter in the request.
+     * 
+ */ + public void listWorkflowTemplates(com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getListWorkflowTemplatesMethodHelper(), responseObserver); + } + + /** + *
+     * Deletes a workflow template. It does not cancel in-progress workflows.
+     * 
+ */ + public void deleteWorkflowTemplate(com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnimplementedUnaryCall(getDeleteWorkflowTemplateMethodHelper(), responseObserver); + } + + @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getCreateWorkflowTemplateMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest, + com.google.cloud.dataproc.v1.WorkflowTemplate>( + this, METHODID_CREATE_WORKFLOW_TEMPLATE))) + .addMethod( + getGetWorkflowTemplateMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest, + com.google.cloud.dataproc.v1.WorkflowTemplate>( + this, METHODID_GET_WORKFLOW_TEMPLATE))) + .addMethod( + getInstantiateWorkflowTemplateMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest, + com.google.longrunning.Operation>( + this, METHODID_INSTANTIATE_WORKFLOW_TEMPLATE))) + .addMethod( + getInstantiateInlineWorkflowTemplateMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest, + com.google.longrunning.Operation>( + this, METHODID_INSTANTIATE_INLINE_WORKFLOW_TEMPLATE))) + .addMethod( + getUpdateWorkflowTemplateMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest, + com.google.cloud.dataproc.v1.WorkflowTemplate>( + this, METHODID_UPDATE_WORKFLOW_TEMPLATE))) + .addMethod( + getListWorkflowTemplatesMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest, + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse>( + this, METHODID_LIST_WORKFLOW_TEMPLATES))) + .addMethod( + 
getDeleteWorkflowTemplateMethodHelper(), + asyncUnaryCall( + new MethodHandlers< + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest, + com.google.protobuf.Empty>( + this, METHODID_DELETE_WORKFLOW_TEMPLATE))) + .build(); + } + } + + /** + *
+   * The API interface for managing Workflow Templates in the
+   * Cloud Dataproc API.
+   * 
+ */ + public static final class WorkflowTemplateServiceStub extends io.grpc.stub.AbstractStub { + private WorkflowTemplateServiceStub(io.grpc.Channel channel) { + super(channel); + } + + private WorkflowTemplateServiceStub(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected WorkflowTemplateServiceStub build(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + return new WorkflowTemplateServiceStub(channel, callOptions); + } + + /** + *
+     * Creates new workflow template.
+     * 
+ */ + public void createWorkflowTemplate(com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getCreateWorkflowTemplateMethodHelper(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Retrieves the latest workflow template.
+     * Can retrieve previously instantiated template by specifying optional
+     * version parameter.
+     * 
+ */ + public void getWorkflowTemplate(com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getGetWorkflowTemplateMethodHelper(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public void instantiateWorkflowTemplate(com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getInstantiateWorkflowTemplateMethodHelper(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * This method is equivalent to executing the sequence
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public void instantiateInlineWorkflowTemplate(com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getInstantiateInlineWorkflowTemplateMethodHelper(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Updates (replaces) workflow template. The updated template
+     * must contain version that matches the current server version.
+     * 
+ */ + public void updateWorkflowTemplate(com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getUpdateWorkflowTemplateMethodHelper(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Lists workflows that match the specified filter in the request.
+     * 
+ */ + public void listWorkflowTemplates(com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getListWorkflowTemplatesMethodHelper(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Deletes a workflow template. It does not cancel in-progress workflows.
+     * 
+ */ + public void deleteWorkflowTemplate(com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest request, + io.grpc.stub.StreamObserver responseObserver) { + asyncUnaryCall( + getChannel().newCall(getDeleteWorkflowTemplateMethodHelper(), getCallOptions()), request, responseObserver); + } + } + + /** + *
+   * The API interface for managing Workflow Templates in the
+   * Cloud Dataproc API.
+   * 
+ */ + public static final class WorkflowTemplateServiceBlockingStub extends io.grpc.stub.AbstractStub { + private WorkflowTemplateServiceBlockingStub(io.grpc.Channel channel) { + super(channel); + } + + private WorkflowTemplateServiceBlockingStub(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected WorkflowTemplateServiceBlockingStub build(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + return new WorkflowTemplateServiceBlockingStub(channel, callOptions); + } + + /** + *
+     * Creates new workflow template.
+     * 
+ */ + public com.google.cloud.dataproc.v1.WorkflowTemplate createWorkflowTemplate(com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest request) { + return blockingUnaryCall( + getChannel(), getCreateWorkflowTemplateMethodHelper(), getCallOptions(), request); + } + + /** + *
+     * Retrieves the latest workflow template.
+     * Can retrieve previously instantiated template by specifying optional
+     * version parameter.
+     * 
+ */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getWorkflowTemplate(com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest request) { + return blockingUnaryCall( + getChannel(), getGetWorkflowTemplateMethodHelper(), getCallOptions(), request); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public com.google.longrunning.Operation instantiateWorkflowTemplate(com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest request) { + return blockingUnaryCall( + getChannel(), getInstantiateWorkflowTemplateMethodHelper(), getCallOptions(), request); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * This method is equivalent to executing the sequence
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public com.google.longrunning.Operation instantiateInlineWorkflowTemplate(com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest request) { + return blockingUnaryCall( + getChannel(), getInstantiateInlineWorkflowTemplateMethodHelper(), getCallOptions(), request); + } + + /** + *
+     * Updates (replaces) workflow template. The updated template
+     * must contain version that matches the current server version.
+     * 
+ */ + public com.google.cloud.dataproc.v1.WorkflowTemplate updateWorkflowTemplate(com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest request) { + return blockingUnaryCall( + getChannel(), getUpdateWorkflowTemplateMethodHelper(), getCallOptions(), request); + } + + /** + *
+     * Lists workflows that match the specified filter in the request.
+     * 
+ */ + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse listWorkflowTemplates(com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest request) { + return blockingUnaryCall( + getChannel(), getListWorkflowTemplatesMethodHelper(), getCallOptions(), request); + } + + /** + *
+     * Deletes a workflow template. It does not cancel in-progress workflows.
+     * 
+ */ + public com.google.protobuf.Empty deleteWorkflowTemplate(com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest request) { + return blockingUnaryCall( + getChannel(), getDeleteWorkflowTemplateMethodHelper(), getCallOptions(), request); + } + } + + /** + *
+   * The API interface for managing Workflow Templates in the
+   * Cloud Dataproc API.
+   * 
+ */ + public static final class WorkflowTemplateServiceFutureStub extends io.grpc.stub.AbstractStub { + private WorkflowTemplateServiceFutureStub(io.grpc.Channel channel) { + super(channel); + } + + private WorkflowTemplateServiceFutureStub(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected WorkflowTemplateServiceFutureStub build(io.grpc.Channel channel, + io.grpc.CallOptions callOptions) { + return new WorkflowTemplateServiceFutureStub(channel, callOptions); + } + + /** + *
+     * Creates new workflow template.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture createWorkflowTemplate( + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest request) { + return futureUnaryCall( + getChannel().newCall(getCreateWorkflowTemplateMethodHelper(), getCallOptions()), request); + } + + /** + *
+     * Retrieves the latest workflow template.
+     * Can retrieve previously instantiated template by specifying optional
+     * version parameter.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture getWorkflowTemplate( + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest request) { + return futureUnaryCall( + getChannel().newCall(getGetWorkflowTemplateMethodHelper(), getCallOptions()), request); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture instantiateWorkflowTemplate( + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest request) { + return futureUnaryCall( + getChannel().newCall(getInstantiateWorkflowTemplateMethodHelper(), getCallOptions()), request); + } + + /** + *
+     * Instantiates a template and begins execution.
+     * This method is equivalent to executing the sequence
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+     * The returned Operation can be used to track execution of
+     * workflow by polling
+     * [operations.get][google.longrunning.Operations.GetOperation].
+     * The Operation will complete when entire workflow is finished.
+     * The running workflow can be aborted via
+     * [operations.cancel][google.longrunning.Operations.CancelOperation].
+     * This will cause any inflight jobs to be cancelled and workflow-owned
+     * clusters to be deleted.
+     * The [Operation.metadata][google.longrunning.Operation.metadata] will be
+     * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata].
+     * On successful completion,
+     * [Operation.response][google.longrunning.Operation.response] will be
+     * [Empty][google.protobuf.Empty].
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture instantiateInlineWorkflowTemplate( + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest request) { + return futureUnaryCall( + getChannel().newCall(getInstantiateInlineWorkflowTemplateMethodHelper(), getCallOptions()), request); + } + + /** + *
+     * Updates (replaces) workflow template. The updated template
+     * must contain version that matches the current server version.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture updateWorkflowTemplate( + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest request) { + return futureUnaryCall( + getChannel().newCall(getUpdateWorkflowTemplateMethodHelper(), getCallOptions()), request); + } + + /** + *
+     * Lists workflows that match the specified filter in the request.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture listWorkflowTemplates( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest request) { + return futureUnaryCall( + getChannel().newCall(getListWorkflowTemplatesMethodHelper(), getCallOptions()), request); + } + + /** + *
+     * Deletes a workflow template. It does not cancel in-progress workflows.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture deleteWorkflowTemplate( + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest request) { + return futureUnaryCall( + getChannel().newCall(getDeleteWorkflowTemplateMethodHelper(), getCallOptions()), request); + } + } + + private static final int METHODID_CREATE_WORKFLOW_TEMPLATE = 0; + private static final int METHODID_GET_WORKFLOW_TEMPLATE = 1; + private static final int METHODID_INSTANTIATE_WORKFLOW_TEMPLATE = 2; + private static final int METHODID_INSTANTIATE_INLINE_WORKFLOW_TEMPLATE = 3; + private static final int METHODID_UPDATE_WORKFLOW_TEMPLATE = 4; + private static final int METHODID_LIST_WORKFLOW_TEMPLATES = 5; + private static final int METHODID_DELETE_WORKFLOW_TEMPLATE = 6; + + private static final class MethodHandlers implements + io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + private final WorkflowTemplateServiceImplBase serviceImpl; + private final int methodId; + + MethodHandlers(WorkflowTemplateServiceImplBase serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_CREATE_WORKFLOW_TEMPLATE: + serviceImpl.createWorkflowTemplate((com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_GET_WORKFLOW_TEMPLATE: + serviceImpl.getWorkflowTemplate((com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_INSTANTIATE_WORKFLOW_TEMPLATE: + serviceImpl.instantiateWorkflowTemplate((com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) request, + 
(io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_INSTANTIATE_INLINE_WORKFLOW_TEMPLATE: + serviceImpl.instantiateInlineWorkflowTemplate((com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_UPDATE_WORKFLOW_TEMPLATE: + serviceImpl.updateWorkflowTemplate((com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_LIST_WORKFLOW_TEMPLATES: + serviceImpl.listWorkflowTemplates((com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_DELETE_WORKFLOW_TEMPLATE: + serviceImpl.deleteWorkflowTemplate((com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + default: + throw new AssertionError(); + } + } + } + + private static abstract class WorkflowTemplateServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { + WorkflowTemplateServiceBaseDescriptorSupplier() {} + + @java.lang.Override + public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.getDescriptor(); + } + + @java.lang.Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("WorkflowTemplateService"); + } + } + + private static final class WorkflowTemplateServiceFileDescriptorSupplier + extends WorkflowTemplateServiceBaseDescriptorSupplier { + 
WorkflowTemplateServiceFileDescriptorSupplier() {} + } + + private static final class WorkflowTemplateServiceMethodDescriptorSupplier + extends WorkflowTemplateServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + private final String methodName; + + WorkflowTemplateServiceMethodDescriptorSupplier(String methodName) { + this.methodName = methodName; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (WorkflowTemplateServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new WorkflowTemplateServiceFileDescriptorSupplier()) + .addMethod(getCreateWorkflowTemplateMethodHelper()) + .addMethod(getGetWorkflowTemplateMethodHelper()) + .addMethod(getInstantiateWorkflowTemplateMethodHelper()) + .addMethod(getInstantiateInlineWorkflowTemplateMethodHelper()) + .addMethod(getUpdateWorkflowTemplateMethodHelper()) + .addMethod(getListWorkflowTemplatesMethodHelper()) + .addMethod(getDeleteWorkflowTemplateMethodHelper()) + .build(); + } + } + } + return result; + } +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfig.java index 5d0dbf428de3..b0cf250eb7a7 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfig.java +++ 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfig.java @@ -6,7 +6,7 @@ /** *
  * Specifies the type and number of accelerator cards attached to the instances
- * of an instance group (see [GPUs on Compute Engine](/compute/docs/gpus/)).
+ * of an instance. See [GPUs on Compute Engine](/compute/docs/gpus/).
  * 
* * Protobuf type {@code google.cloud.dataproc.v1.AcceleratorConfig} @@ -97,12 +97,16 @@ private AcceleratorConfig( /** *
    * Full URL, partial URI, or short name of the accelerator type resource to
-   * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-   * /compute/docs/reference/beta/acceleratorTypes)
-   * Examples
+   * expose to this instance. See
+   * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+   * Examples:
    * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `nvidia-tesla-k80`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the accelerator type
+   * resource, for example, `nvidia-tesla-k80`.
    * 
* * string accelerator_type_uri = 1; @@ -122,12 +126,16 @@ public java.lang.String getAcceleratorTypeUri() { /** *
    * Full URL, partial URI, or short name of the accelerator type resource to
-   * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-   * /compute/docs/reference/beta/acceleratorTypes)
-   * Examples
+   * expose to this instance. See
+   * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+   * Examples:
    * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `nvidia-tesla-k80`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the accelerator type
+   * resource, for example, `nvidia-tesla-k80`.
    * 
* * string accelerator_type_uri = 1; @@ -328,7 +336,7 @@ protected Builder newBuilderForType( /** *
    * Specifies the type and number of accelerator cards attached to the instances
-   * of an instance group (see [GPUs on Compute Engine](/compute/docs/gpus/)).
+   * of an instance. See [GPUs on Compute Engine](/compute/docs/gpus/).
    * 
* * Protobuf type {@code google.cloud.dataproc.v1.AcceleratorConfig} @@ -488,12 +496,16 @@ public Builder mergeFrom( /** *
      * Full URL, partial URI, or short name of the accelerator type resource to
-     * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-     * /compute/docs/reference/beta/acceleratorTypes)
-     * Examples
+     * expose to this instance. See
+     * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+     * Examples:
      * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `nvidia-tesla-k80`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the accelerator type
+     * resource, for example, `nvidia-tesla-k80`.
      * 
* * string accelerator_type_uri = 1; @@ -513,12 +525,16 @@ public java.lang.String getAcceleratorTypeUri() { /** *
      * Full URL, partial URI, or short name of the accelerator type resource to
-     * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-     * /compute/docs/reference/beta/acceleratorTypes)
-     * Examples
+     * expose to this instance. See
+     * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+     * Examples:
      * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `nvidia-tesla-k80`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the accelerator type
+     * resource, for example, `nvidia-tesla-k80`.
      * 
* * string accelerator_type_uri = 1; @@ -539,12 +555,16 @@ public java.lang.String getAcceleratorTypeUri() { /** *
      * Full URL, partial URI, or short name of the accelerator type resource to
-     * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-     * /compute/docs/reference/beta/acceleratorTypes)
-     * Examples
+     * expose to this instance. See
+     * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+     * Examples:
      * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `nvidia-tesla-k80`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the accelerator type
+     * resource, for example, `nvidia-tesla-k80`.
      * 
* * string accelerator_type_uri = 1; @@ -562,12 +582,16 @@ public Builder setAcceleratorTypeUri( /** *
      * Full URL, partial URI, or short name of the accelerator type resource to
-     * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-     * /compute/docs/reference/beta/acceleratorTypes)
-     * Examples
+     * expose to this instance. See
+     * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+     * Examples:
      * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `nvidia-tesla-k80`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the accelerator type
+     * resource, for example, `nvidia-tesla-k80`.
      * 
* * string accelerator_type_uri = 1; @@ -581,12 +605,16 @@ public Builder clearAcceleratorTypeUri() { /** *
      * Full URL, partial URI, or short name of the accelerator type resource to
-     * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-     * /compute/docs/reference/beta/acceleratorTypes)
-     * Examples
+     * expose to this instance. See
+     * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+     * Examples:
      * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
      * * `nvidia-tesla-k80`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the accelerator type
+     * resource, for example, `nvidia-tesla-k80`.
      * 
* * string accelerator_type_uri = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfigOrBuilder.java index 80c47e890c00..b16a20a66959 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AcceleratorConfigOrBuilder.java @@ -10,12 +10,16 @@ public interface AcceleratorConfigOrBuilder extends /** *
    * Full URL, partial URI, or short name of the accelerator type resource to
-   * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-   * /compute/docs/reference/beta/acceleratorTypes)
-   * Examples
+   * expose to this instance. See
+   * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+   * Examples:
    * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `nvidia-tesla-k80`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the accelerator type
+   * resource, for example, `nvidia-tesla-k80`.
    * 
* * string accelerator_type_uri = 1; @@ -24,12 +28,16 @@ public interface AcceleratorConfigOrBuilder extends /** *
    * Full URL, partial URI, or short name of the accelerator type resource to
-   * expose to this instance. See [Google Compute Engine AcceleratorTypes](
-   * /compute/docs/reference/beta/acceleratorTypes)
-   * Examples
+   * expose to this instance. See
+   * [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes).
+   * Examples:
    * * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80`
    * * `nvidia-tesla-k80`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the accelerator type
+   * resource, for example, `nvidia-tesla-k80`.
    * 
* * string accelerator_type_uri = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Cluster.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Cluster.java index 8bdae40576aa..62c6a78d7e85 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Cluster.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Cluster.java @@ -6,7 +6,7 @@ /** *
  * Describes the identifying information, config, and status of
- * a cluster of Google Compute Engine instances.
+ * a cluster of Compute Engine instances.
  * 
* * Protobuf type {@code google.cloud.dataproc.v1.Cluster} @@ -416,7 +416,7 @@ public java.lang.String getLabelsOrThrow( private com.google.cloud.dataproc.v1.ClusterStatus status_; /** *
-   * Output-only. Cluster status.
+   * Output only. Cluster status.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -426,7 +426,7 @@ public boolean hasStatus() { } /** *
-   * Output-only. Cluster status.
+   * Output only. Cluster status.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -436,7 +436,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus getStatus() { } /** *
-   * Output-only. Cluster status.
+   * Output only. Cluster status.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -449,7 +449,7 @@ public com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusOrBuilder() private java.util.List statusHistory_; /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -459,7 +459,7 @@ public java.util.List getStatusHisto } /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -470,7 +470,7 @@ public java.util.List getStatusHisto } /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -480,7 +480,7 @@ public int getStatusHistoryCount() { } /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -490,7 +490,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus getStatusHistory(int index) { } /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -504,7 +504,7 @@ public com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusHistoryOrBui private volatile java.lang.Object clusterUuid_; /** *
-   * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+   * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
    * generates this value when it creates the cluster.
    * 
* @@ -524,7 +524,7 @@ public java.lang.String getClusterUuid() { } /** *
-   * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+   * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
    * generates this value when it creates the cluster.
    * 
* @@ -844,7 +844,7 @@ protected Builder newBuilderForType( /** *
    * Describes the identifying information, config, and status of
-   * a cluster of Google Compute Engine instances.
+   * a cluster of Compute Engine instances.
    * 
* * Protobuf type {@code google.cloud.dataproc.v1.Cluster} @@ -1659,7 +1659,7 @@ public Builder putAllLabels( com.google.cloud.dataproc.v1.ClusterStatus, com.google.cloud.dataproc.v1.ClusterStatus.Builder, com.google.cloud.dataproc.v1.ClusterStatusOrBuilder> statusBuilder_; /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1669,7 +1669,7 @@ public boolean hasStatus() { } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1683,7 +1683,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus getStatus() { } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1703,7 +1703,7 @@ public Builder setStatus(com.google.cloud.dataproc.v1.ClusterStatus value) { } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1721,7 +1721,7 @@ public Builder setStatus( } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1743,7 +1743,7 @@ public Builder mergeStatus(com.google.cloud.dataproc.v1.ClusterStatus value) { } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1761,7 +1761,7 @@ public Builder clearStatus() { } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1773,7 +1773,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.Builder getStatusBuilder() { } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1788,7 +1788,7 @@ public com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusOrBuilder() } /** *
-     * Output-only. Cluster status.
+     * Output only. Cluster status.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -1821,7 +1821,7 @@ private void ensureStatusHistoryIsMutable() { /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1835,7 +1835,7 @@ public java.util.List getStatusHisto } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1849,7 +1849,7 @@ public int getStatusHistoryCount() { } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1863,7 +1863,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus getStatusHistory(int index) { } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1884,7 +1884,7 @@ public Builder setStatusHistory( } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1902,7 +1902,7 @@ public Builder setStatusHistory( } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1922,7 +1922,7 @@ public Builder addStatusHistory(com.google.cloud.dataproc.v1.ClusterStatus value } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1943,7 +1943,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1961,7 +1961,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1979,7 +1979,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -1998,7 +1998,7 @@ public Builder addAllStatusHistory( } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2015,7 +2015,7 @@ public Builder clearStatusHistory() { } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2032,7 +2032,7 @@ public Builder removeStatusHistory(int index) { } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2043,7 +2043,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.Builder getStatusHistoryBuilde } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2057,7 +2057,7 @@ public com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusHistoryOrBui } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2072,7 +2072,7 @@ public com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusHistoryOrBui } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2083,7 +2083,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.Builder addStatusHistoryBuilde } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2095,7 +2095,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.Builder addStatusHistoryBuilde } /** *
-     * Output-only. The previous cluster status.
+     * Output only. The previous cluster status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -2122,7 +2122,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.Builder addStatusHistoryBuilde private java.lang.Object clusterUuid_ = ""; /** *
-     * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+     * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
      * generates this value when it creates the cluster.
      * 
* @@ -2142,7 +2142,7 @@ public java.lang.String getClusterUuid() { } /** *
-     * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+     * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
      * generates this value when it creates the cluster.
      * 
* @@ -2163,7 +2163,7 @@ public java.lang.String getClusterUuid() { } /** *
-     * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+     * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
      * generates this value when it creates the cluster.
      * 
* @@ -2181,7 +2181,7 @@ public Builder setClusterUuid( } /** *
-     * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+     * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
      * generates this value when it creates the cluster.
      * 
* @@ -2195,7 +2195,7 @@ public Builder clearClusterUuid() { } /** *
-     * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+     * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
      * generates this value when it creates the cluster.
      * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfig.java index 4fb1f652f91f..ee07dd84957b 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfig.java @@ -128,6 +128,19 @@ private ClusterConfig( break; } + case 122: { + com.google.cloud.dataproc.v1.EncryptionConfig.Builder subBuilder = null; + if (encryptionConfig_ != null) { + subBuilder = encryptionConfig_.toBuilder(); + } + encryptionConfig_ = input.readMessage(com.google.cloud.dataproc.v1.EncryptionConfig.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionConfig_); + encryptionConfig_ = subBuilder.buildPartial(); + } + + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -168,7 +181,7 @@ private ClusterConfig( private volatile java.lang.Object configBucket_; /** *
-   * Optional. A Google Cloud Storage staging bucket used for sharing generated
+   * Optional. A Cloud Storage staging bucket used for sharing generated
    * SSH keys and config. If you do not specify a staging bucket, Cloud
    * Dataproc will determine an appropriate Cloud Storage location (US,
    * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -192,7 +205,7 @@ public java.lang.String getConfigBucket() {
   }
   /**
    * 
-   * Optional. A Google Cloud Storage staging bucket used for sharing generated
+   * Optional. A Cloud Storage staging bucket used for sharing generated
    * SSH keys and config. If you do not specify a staging bucket, Cloud
    * Dataproc will determine an appropriate Cloud Storage location (US,
    * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -220,7 +233,7 @@ public java.lang.String getConfigBucket() {
   private com.google.cloud.dataproc.v1.GceClusterConfig gceClusterConfig_;
   /**
    * 
-   * Required. The shared Google Compute Engine config settings for
+   * Required. The shared Compute Engine config settings for
    * all instances in a cluster.
    * 
* @@ -231,7 +244,7 @@ public boolean hasGceClusterConfig() { } /** *
-   * Required. The shared Google Compute Engine config settings for
+   * Required. The shared Compute Engine config settings for
    * all instances in a cluster.
    * 
* @@ -242,7 +255,7 @@ public com.google.cloud.dataproc.v1.GceClusterConfig getGceClusterConfig() { } /** *
-   * Required. The shared Google Compute Engine config settings for
+   * Required. The shared Compute Engine config settings for
    * all instances in a cluster.
    * 
* @@ -256,7 +269,7 @@ public com.google.cloud.dataproc.v1.GceClusterConfigOrBuilder getGceClusterConfi private com.google.cloud.dataproc.v1.InstanceGroupConfig masterConfig_; /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * the master instance in a cluster.
    * 
* @@ -267,7 +280,7 @@ public boolean hasMasterConfig() { } /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * the master instance in a cluster.
    * 
* @@ -278,7 +291,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig getMasterConfig() { } /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * the master instance in a cluster.
    * 
* @@ -292,7 +305,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getMasterConfig private com.google.cloud.dataproc.v1.InstanceGroupConfig workerConfig_; /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * worker instances in a cluster.
    * 
* @@ -303,7 +316,7 @@ public boolean hasWorkerConfig() { } /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * worker instances in a cluster.
    * 
* @@ -314,7 +327,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig getWorkerConfig() { } /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * worker instances in a cluster.
    * 
* @@ -328,7 +341,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getWorkerConfig private com.google.cloud.dataproc.v1.InstanceGroupConfig secondaryWorkerConfig_; /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * additional worker instances in a cluster.
    * 
* @@ -339,7 +352,7 @@ public boolean hasSecondaryWorkerConfig() { } /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * additional worker instances in a cluster.
    * 
* @@ -350,7 +363,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig getSecondaryWorkerConfig } /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * additional worker instances in a cluster.
    * 
* @@ -493,6 +506,39 @@ public com.google.cloud.dataproc.v1.NodeInitializationActionOrBuilder getInitial return initializationActions_.get(index); } + public static final int ENCRYPTION_CONFIG_FIELD_NUMBER = 15; + private com.google.cloud.dataproc.v1.EncryptionConfig encryptionConfig_; + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public boolean hasEncryptionConfig() { + return encryptionConfig_ != null; + } + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1.EncryptionConfig getEncryptionConfig() { + return encryptionConfig_ == null ? com.google.cloud.dataproc.v1.EncryptionConfig.getDefaultInstance() : encryptionConfig_; + } + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder() { + return getEncryptionConfig(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -528,6 +574,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (softwareConfig_ != null) { output.writeMessage(13, getSoftwareConfig()); } + if (encryptionConfig_ != null) { + output.writeMessage(15, getEncryptionConfig()); + } unknownFields.writeTo(output); } @@ -564,6 +613,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(13, getSoftwareConfig()); } + if (encryptionConfig_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(15, getEncryptionConfig()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -609,6 +662,11 @@ public boolean equals(final java.lang.Object obj) { } result = result && getInitializationActionsList() .equals(other.getInitializationActionsList()); + result = result && (hasEncryptionConfig() == other.hasEncryptionConfig()); + if (hasEncryptionConfig()) { + result = result && getEncryptionConfig() + .equals(other.getEncryptionConfig()); + } result = result && unknownFields.equals(other.unknownFields); return result; } @@ -646,6 +704,10 @@ public int hashCode() { hash = (37 * hash) + INITIALIZATION_ACTIONS_FIELD_NUMBER; hash = (53 * hash) + getInitializationActionsList().hashCode(); } + if (hasEncryptionConfig()) { + hash = (37 * hash) + ENCRYPTION_CONFIG_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionConfig().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -822,6 +884,12 @@ public Builder clear() { } else { initializationActionsBuilder_.clear(); } + if (encryptionConfigBuilder_ == null) { + encryptionConfig_ = null; + } else { + 
encryptionConfig_ = null; + encryptionConfigBuilder_ = null; + } return this; } @@ -885,6 +953,11 @@ public com.google.cloud.dataproc.v1.ClusterConfig buildPartial() { } else { result.initializationActions_ = initializationActionsBuilder_.build(); } + if (encryptionConfigBuilder_ == null) { + result.encryptionConfig_ = encryptionConfig_; + } else { + result.encryptionConfig_ = encryptionConfigBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -979,6 +1052,9 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1.ClusterConfig other) { } } } + if (other.hasEncryptionConfig()) { + mergeEncryptionConfig(other.getEncryptionConfig()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1012,7 +1088,7 @@ public Builder mergeFrom( private java.lang.Object configBucket_ = ""; /** *
-     * Optional. A Google Cloud Storage staging bucket used for sharing generated
+     * Optional. A Cloud Storage staging bucket used for sharing generated
      * SSH keys and config. If you do not specify a staging bucket, Cloud
      * Dataproc will determine an appropriate Cloud Storage location (US,
      * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -1036,7 +1112,7 @@ public java.lang.String getConfigBucket() {
     }
     /**
      * 
-     * Optional. A Google Cloud Storage staging bucket used for sharing generated
+     * Optional. A Cloud Storage staging bucket used for sharing generated
      * SSH keys and config. If you do not specify a staging bucket, Cloud
      * Dataproc will determine an appropriate Cloud Storage location (US,
      * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -1061,7 +1137,7 @@ public java.lang.String getConfigBucket() {
     }
     /**
      * 
-     * Optional. A Google Cloud Storage staging bucket used for sharing generated
+     * Optional. A Cloud Storage staging bucket used for sharing generated
      * SSH keys and config. If you do not specify a staging bucket, Cloud
      * Dataproc will determine an appropriate Cloud Storage location (US,
      * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -1083,7 +1159,7 @@ public Builder setConfigBucket(
     }
     /**
      * 
-     * Optional. A Google Cloud Storage staging bucket used for sharing generated
+     * Optional. A Cloud Storage staging bucket used for sharing generated
      * SSH keys and config. If you do not specify a staging bucket, Cloud
      * Dataproc will determine an appropriate Cloud Storage location (US,
      * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -1101,7 +1177,7 @@ public Builder clearConfigBucket() {
     }
     /**
      * 
-     * Optional. A Google Cloud Storage staging bucket used for sharing generated
+     * Optional. A Cloud Storage staging bucket used for sharing generated
      * SSH keys and config. If you do not specify a staging bucket, Cloud
      * Dataproc will determine an appropriate Cloud Storage location (US,
      * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -1128,7 +1204,7 @@ public Builder setConfigBucketBytes(
         com.google.cloud.dataproc.v1.GceClusterConfig, com.google.cloud.dataproc.v1.GceClusterConfig.Builder, com.google.cloud.dataproc.v1.GceClusterConfigOrBuilder> gceClusterConfigBuilder_;
     /**
      * 
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1139,7 +1215,7 @@ public boolean hasGceClusterConfig() { } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1154,7 +1230,7 @@ public com.google.cloud.dataproc.v1.GceClusterConfig getGceClusterConfig() { } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1175,7 +1251,7 @@ public Builder setGceClusterConfig(com.google.cloud.dataproc.v1.GceClusterConfig } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1194,7 +1270,7 @@ public Builder setGceClusterConfig( } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1217,7 +1293,7 @@ public Builder mergeGceClusterConfig(com.google.cloud.dataproc.v1.GceClusterConf } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1236,7 +1312,7 @@ public Builder clearGceClusterConfig() { } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1249,7 +1325,7 @@ public com.google.cloud.dataproc.v1.GceClusterConfig.Builder getGceClusterConfig } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1265,7 +1341,7 @@ public com.google.cloud.dataproc.v1.GceClusterConfigOrBuilder getGceClusterConfi } /** *
-     * Required. The shared Google Compute Engine config settings for
+     * Required. The shared Compute Engine config settings for
      * all instances in a cluster.
      * 
* @@ -1290,7 +1366,7 @@ public com.google.cloud.dataproc.v1.GceClusterConfigOrBuilder getGceClusterConfi com.google.cloud.dataproc.v1.InstanceGroupConfig, com.google.cloud.dataproc.v1.InstanceGroupConfig.Builder, com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder> masterConfigBuilder_; /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1301,7 +1377,7 @@ public boolean hasMasterConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1316,7 +1392,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig getMasterConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1337,7 +1413,7 @@ public Builder setMasterConfig(com.google.cloud.dataproc.v1.InstanceGroupConfig } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1356,7 +1432,7 @@ public Builder setMasterConfig( } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1379,7 +1455,7 @@ public Builder mergeMasterConfig(com.google.cloud.dataproc.v1.InstanceGroupConfi } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1398,7 +1474,7 @@ public Builder clearMasterConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1411,7 +1487,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig.Builder getMasterConfigB } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1427,7 +1503,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getMasterConfig } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * the master instance in a cluster.
      * 
* @@ -1452,7 +1528,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getMasterConfig com.google.cloud.dataproc.v1.InstanceGroupConfig, com.google.cloud.dataproc.v1.InstanceGroupConfig.Builder, com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder> workerConfigBuilder_; /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1463,7 +1539,7 @@ public boolean hasWorkerConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1478,7 +1554,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig getWorkerConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1499,7 +1575,7 @@ public Builder setWorkerConfig(com.google.cloud.dataproc.v1.InstanceGroupConfig } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1518,7 +1594,7 @@ public Builder setWorkerConfig( } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1541,7 +1617,7 @@ public Builder mergeWorkerConfig(com.google.cloud.dataproc.v1.InstanceGroupConfi } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1560,7 +1636,7 @@ public Builder clearWorkerConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1573,7 +1649,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig.Builder getWorkerConfigB } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1589,7 +1665,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getWorkerConfig } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * worker instances in a cluster.
      * 
* @@ -1614,7 +1690,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getWorkerConfig com.google.cloud.dataproc.v1.InstanceGroupConfig, com.google.cloud.dataproc.v1.InstanceGroupConfig.Builder, com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder> secondaryWorkerConfigBuilder_; /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1625,7 +1701,7 @@ public boolean hasSecondaryWorkerConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1640,7 +1716,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig getSecondaryWorkerConfig } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1661,7 +1737,7 @@ public Builder setSecondaryWorkerConfig(com.google.cloud.dataproc.v1.InstanceGro } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1680,7 +1756,7 @@ public Builder setSecondaryWorkerConfig( } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1703,7 +1779,7 @@ public Builder mergeSecondaryWorkerConfig(com.google.cloud.dataproc.v1.InstanceG } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1722,7 +1798,7 @@ public Builder clearSecondaryWorkerConfig() { } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1735,7 +1811,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfig.Builder getSecondaryWork } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -1751,7 +1827,7 @@ public com.google.cloud.dataproc.v1.InstanceGroupConfigOrBuilder getSecondaryWor } /** *
-     * Optional. The Google Compute Engine config settings for
+     * Optional. The Compute Engine config settings for
      * additional worker instances in a cluster.
      * 
* @@ -2397,6 +2473,159 @@ public com.google.cloud.dataproc.v1.NodeInitializationAction.Builder addInitiali } return initializationActionsBuilder_; } + + private com.google.cloud.dataproc.v1.EncryptionConfig encryptionConfig_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.EncryptionConfig, com.google.cloud.dataproc.v1.EncryptionConfig.Builder, com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder> encryptionConfigBuilder_; + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public boolean hasEncryptionConfig() { + return encryptionConfigBuilder_ != null || encryptionConfig_ != null; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1.EncryptionConfig getEncryptionConfig() { + if (encryptionConfigBuilder_ == null) { + return encryptionConfig_ == null ? com.google.cloud.dataproc.v1.EncryptionConfig.getDefaultInstance() : encryptionConfig_; + } else { + return encryptionConfigBuilder_.getMessage(); + } + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public Builder setEncryptionConfig(com.google.cloud.dataproc.v1.EncryptionConfig value) { + if (encryptionConfigBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionConfig_ = value; + onChanged(); + } else { + encryptionConfigBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public Builder setEncryptionConfig( + com.google.cloud.dataproc.v1.EncryptionConfig.Builder builderForValue) { + if (encryptionConfigBuilder_ == null) { + encryptionConfig_ = builderForValue.build(); + onChanged(); + } else { + encryptionConfigBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public Builder mergeEncryptionConfig(com.google.cloud.dataproc.v1.EncryptionConfig value) { + if (encryptionConfigBuilder_ == null) { + if (encryptionConfig_ != null) { + encryptionConfig_ = + com.google.cloud.dataproc.v1.EncryptionConfig.newBuilder(encryptionConfig_).mergeFrom(value).buildPartial(); + } else { + encryptionConfig_ = value; + } + onChanged(); + } else { + encryptionConfigBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public Builder clearEncryptionConfig() { + if (encryptionConfigBuilder_ == null) { + encryptionConfig_ = null; + onChanged(); + } else { + encryptionConfig_ = null; + encryptionConfigBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1.EncryptionConfig.Builder getEncryptionConfigBuilder() { + + onChanged(); + return getEncryptionConfigFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder() { + if (encryptionConfigBuilder_ != null) { + return encryptionConfigBuilder_.getMessageOrBuilder(); + } else { + return encryptionConfig_ == null ? + com.google.cloud.dataproc.v1.EncryptionConfig.getDefaultInstance() : encryptionConfig_; + } + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.EncryptionConfig, com.google.cloud.dataproc.v1.EncryptionConfig.Builder, com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder> + getEncryptionConfigFieldBuilder() { + if (encryptionConfigBuilder_ == null) { + encryptionConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.EncryptionConfig, com.google.cloud.dataproc.v1.EncryptionConfig.Builder, com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder>( + getEncryptionConfig(), + getParentForChildren(), + isClean()); + encryptionConfig_ = null; + } + return encryptionConfigBuilder_; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfigOrBuilder.java index 8558b132d3f1..24f81b572df0 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterConfigOrBuilder.java @@ -9,7 +9,7 @@ public interface ClusterConfigOrBuilder extends /** *
-   * Optional. A Google Cloud Storage staging bucket used for sharing generated
+   * Optional. A Cloud Storage staging bucket used for sharing generated
    * SSH keys and config. If you do not specify a staging bucket, Cloud
    * Dataproc will determine an appropriate Cloud Storage location (US,
    * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -22,7 +22,7 @@ public interface ClusterConfigOrBuilder extends
   java.lang.String getConfigBucket();
   /**
    * 
-   * Optional. A Google Cloud Storage staging bucket used for sharing generated
+   * Optional. A Cloud Storage staging bucket used for sharing generated
    * SSH keys and config. If you do not specify a staging bucket, Cloud
    * Dataproc will determine an appropriate Cloud Storage location (US,
    * ASIA, or EU) for your cluster's staging bucket according to the Google
@@ -37,7 +37,7 @@ public interface ClusterConfigOrBuilder extends
 
   /**
    * 
-   * Required. The shared Google Compute Engine config settings for
+   * Required. The shared Compute Engine config settings for
    * all instances in a cluster.
    * 
* @@ -46,7 +46,7 @@ public interface ClusterConfigOrBuilder extends boolean hasGceClusterConfig(); /** *
-   * Required. The shared Google Compute Engine config settings for
+   * Required. The shared Compute Engine config settings for
    * all instances in a cluster.
    * 
* @@ -55,7 +55,7 @@ public interface ClusterConfigOrBuilder extends com.google.cloud.dataproc.v1.GceClusterConfig getGceClusterConfig(); /** *
-   * Required. The shared Google Compute Engine config settings for
+   * Required. The shared Compute Engine config settings for
    * all instances in a cluster.
    * 
* @@ -65,7 +65,7 @@ public interface ClusterConfigOrBuilder extends /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * the master instance in a cluster.
    * 
* @@ -74,7 +74,7 @@ public interface ClusterConfigOrBuilder extends boolean hasMasterConfig(); /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * the master instance in a cluster.
    * 
* @@ -83,7 +83,7 @@ public interface ClusterConfigOrBuilder extends com.google.cloud.dataproc.v1.InstanceGroupConfig getMasterConfig(); /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * the master instance in a cluster.
    * 
* @@ -93,7 +93,7 @@ public interface ClusterConfigOrBuilder extends /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * worker instances in a cluster.
    * 
* @@ -102,7 +102,7 @@ public interface ClusterConfigOrBuilder extends boolean hasWorkerConfig(); /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * worker instances in a cluster.
    * 
* @@ -111,7 +111,7 @@ public interface ClusterConfigOrBuilder extends com.google.cloud.dataproc.v1.InstanceGroupConfig getWorkerConfig(); /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * worker instances in a cluster.
    * 
* @@ -121,7 +121,7 @@ public interface ClusterConfigOrBuilder extends /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * additional worker instances in a cluster.
    * 
* @@ -130,7 +130,7 @@ public interface ClusterConfigOrBuilder extends boolean hasSecondaryWorkerConfig(); /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * additional worker instances in a cluster.
    * 
* @@ -139,7 +139,7 @@ public interface ClusterConfigOrBuilder extends com.google.cloud.dataproc.v1.InstanceGroupConfig getSecondaryWorkerConfig(); /** *
-   * Optional. The Google Compute Engine config settings for
+   * Optional. The Compute Engine config settings for
    * additional worker instances in a cluster.
    * 
* @@ -260,4 +260,29 @@ public interface ClusterConfigOrBuilder extends */ com.google.cloud.dataproc.v1.NodeInitializationActionOrBuilder getInitializationActionsOrBuilder( int index); + + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + boolean hasEncryptionConfig(); + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + com.google.cloud.dataproc.v1.EncryptionConfig getEncryptionConfig(); + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1.EncryptionConfig encryption_config = 15; + */ + com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperation.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperation.java new file mode 100644 index 000000000000..a5cd68178217 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperation.java @@ -0,0 +1,807 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * The cluster operation triggered by a workflow.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ClusterOperation} + */ +public final class ClusterOperation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ClusterOperation) + ClusterOperationOrBuilder { +private static final long serialVersionUID = 0L; + // Use ClusterOperation.newBuilder() to construct. + private ClusterOperation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ClusterOperation() { + operationId_ = ""; + error_ = ""; + done_ = false; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ClusterOperation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + operationId_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + error_ = s; + break; + } + case 24: { + + done_ = input.readBool(); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + 
makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterOperation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterOperation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ClusterOperation.class, com.google.cloud.dataproc.v1.ClusterOperation.Builder.class); + } + + public static final int OPERATION_ID_FIELD_NUMBER = 1; + private volatile java.lang.Object operationId_; + /** + *
+   * Output only. The id of the cluster operation.
+   * 
+ * + * string operation_id = 1; + */ + public java.lang.String getOperationId() { + java.lang.Object ref = operationId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operationId_ = s; + return s; + } + } + /** + *
+   * Output only. The id of the cluster operation.
+   * 
+ * + * string operation_id = 1; + */ + public com.google.protobuf.ByteString + getOperationIdBytes() { + java.lang.Object ref = operationId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operationId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ERROR_FIELD_NUMBER = 2; + private volatile java.lang.Object error_; + /** + *
+   * Output only. Error, if operation failed.
+   * 
+ * + * string error = 2; + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } + } + /** + *
+   * Output only. Error, if operation failed.
+   * 
+ * + * string error = 2; + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DONE_FIELD_NUMBER = 3; + private boolean done_; + /** + *
+   * Output only. Indicates the operation is done.
+   * 
+ * + * bool done = 3; + */ + public boolean getDone() { + return done_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getOperationIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, operationId_); + } + if (!getErrorBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, error_); + } + if (done_ != false) { + output.writeBool(3, done_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getOperationIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, operationId_); + } + if (!getErrorBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, error_); + } + if (done_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, done_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ClusterOperation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.ClusterOperation other = (com.google.cloud.dataproc.v1.ClusterOperation) obj; + + boolean result = true; + result = result && getOperationId() + .equals(other.getOperationId()); + result = result && getError() + .equals(other.getError()); + result = result && (getDone() + == other.getDone()); + result = result && 
unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + OPERATION_ID_FIELD_NUMBER; + hash = (53 * hash) + getOperationId().hashCode(); + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + hash = (37 * hash) + DONE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDone()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ClusterOperation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(com.google.cloud.dataproc.v1.ClusterOperation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * The cluster operation triggered by a workflow.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ClusterOperation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ClusterOperation) + com.google.cloud.dataproc.v1.ClusterOperationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterOperation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterOperation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ClusterOperation.class, com.google.cloud.dataproc.v1.ClusterOperation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ClusterOperation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + operationId_ = ""; + + error_ = ""; + + done_ = false; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterOperation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterOperation getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance(); + } + + @java.lang.Override + 
public com.google.cloud.dataproc.v1.ClusterOperation build() { + com.google.cloud.dataproc.v1.ClusterOperation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterOperation buildPartial() { + com.google.cloud.dataproc.v1.ClusterOperation result = new com.google.cloud.dataproc.v1.ClusterOperation(this); + result.operationId_ = operationId_; + result.error_ = error_; + result.done_ = done_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ClusterOperation) { + return mergeFrom((com.google.cloud.dataproc.v1.ClusterOperation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ClusterOperation other) { + if (other == 
com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance()) return this; + if (!other.getOperationId().isEmpty()) { + operationId_ = other.operationId_; + onChanged(); + } + if (!other.getError().isEmpty()) { + error_ = other.error_; + onChanged(); + } + if (other.getDone() != false) { + setDone(other.getDone()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ClusterOperation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ClusterOperation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object operationId_ = ""; + /** + *
+     * Output only. The id of the cluster operation.
+     * 
+ * + * string operation_id = 1; + */ + public java.lang.String getOperationId() { + java.lang.Object ref = operationId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operationId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The id of the cluster operation.
+     * 
+ * + * string operation_id = 1; + */ + public com.google.protobuf.ByteString + getOperationIdBytes() { + java.lang.Object ref = operationId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operationId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The id of the cluster operation.
+     * 
+ * + * string operation_id = 1; + */ + public Builder setOperationId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + operationId_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The id of the cluster operation.
+     * 
+ * + * string operation_id = 1; + */ + public Builder clearOperationId() { + + operationId_ = getDefaultInstance().getOperationId(); + onChanged(); + return this; + } + /** + *
+     * Output only. The id of the cluster operation.
+     * 
+ * + * string operation_id = 1; + */ + public Builder setOperationIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + operationId_ = value; + onChanged(); + return this; + } + + private java.lang.Object error_ = ""; + /** + *
+     * Output only. Error, if operation failed.
+     * 
+ * + * string error = 2; + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. Error, if operation failed.
+     * 
+ * + * string error = 2; + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. Error, if operation failed.
+     * 
+ * + * string error = 2; + */ + public Builder setError( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + error_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. Error, if operation failed.
+     * 
+ * + * string error = 2; + */ + public Builder clearError() { + + error_ = getDefaultInstance().getError(); + onChanged(); + return this; + } + /** + *
+     * Output only. Error, if operation failed.
+     * 
+ * + * string error = 2; + */ + public Builder setErrorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + error_ = value; + onChanged(); + return this; + } + + private boolean done_ ; + /** + *
+     * Output only. Indicates the operation is done.
+     * 
+ * + * bool done = 3; + */ + public boolean getDone() { + return done_; + } + /** + *
+     * Output only. Indicates the operation is done.
+     * 
+ * + * bool done = 3; + */ + public Builder setDone(boolean value) { + + done_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. Indicates the operation is done.
+     * 
+ * + * bool done = 3; + */ + public Builder clearDone() { + + done_ = false; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ClusterOperation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterOperation) + private static final com.google.cloud.dataproc.v1.ClusterOperation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ClusterOperation(); + } + + public static com.google.cloud.dataproc.v1.ClusterOperation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ClusterOperation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterOperation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterOperation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadata.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadata.java index 9ffe26e17e5d..3783ffcbc829 100644 --- 
a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadata.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadata.java @@ -175,7 +175,7 @@ protected com.google.protobuf.MapField internalGetMapField( private volatile java.lang.Object clusterName_; /** *
-   * Output-only. Name of the cluster for the operation.
+   * Output only. Name of the cluster for the operation.
    * 
* * string cluster_name = 7; @@ -194,7 +194,7 @@ public java.lang.String getClusterName() { } /** *
-   * Output-only. Name of the cluster for the operation.
+   * Output only. Name of the cluster for the operation.
    * 
* * string cluster_name = 7; @@ -217,7 +217,7 @@ public java.lang.String getClusterName() { private volatile java.lang.Object clusterUuid_; /** *
-   * Output-only. Cluster UUID for the operation.
+   * Output only. Cluster UUID for the operation.
    * 
* * string cluster_uuid = 8; @@ -236,7 +236,7 @@ public java.lang.String getClusterUuid() { } /** *
-   * Output-only. Cluster UUID for the operation.
+   * Output only. Cluster UUID for the operation.
    * 
* * string cluster_uuid = 8; @@ -259,7 +259,7 @@ public java.lang.String getClusterUuid() { private com.google.cloud.dataproc.v1.ClusterOperationStatus status_; /** *
-   * Output-only. Current operation status.
+   * Output only. Current operation status.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -269,7 +269,7 @@ public boolean hasStatus() { } /** *
-   * Output-only. Current operation status.
+   * Output only. Current operation status.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -279,7 +279,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus getStatus() { } /** *
-   * Output-only. Current operation status.
+   * Output only. Current operation status.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -292,7 +292,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusOrB private java.util.List statusHistory_; /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -302,7 +302,7 @@ public java.util.List getSt } /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -313,7 +313,7 @@ public java.util.List getSt } /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -323,7 +323,7 @@ public int getStatusHistoryCount() { } /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -333,7 +333,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus getStatusHistory(int } /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -347,7 +347,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHis private volatile java.lang.Object operationType_; /** *
-   * Output-only. The operation type.
+   * Output only. The operation type.
    * 
* * string operation_type = 11; @@ -366,7 +366,7 @@ public java.lang.String getOperationType() { } /** *
-   * Output-only. The operation type.
+   * Output only. The operation type.
    * 
* * string operation_type = 11; @@ -389,7 +389,7 @@ public java.lang.String getOperationType() { private volatile java.lang.Object description_; /** *
-   * Output-only. Short description of operation.
+   * Output only. Short description of operation.
    * 
* * string description = 12; @@ -408,7 +408,7 @@ public java.lang.String getDescription() { } /** *
-   * Output-only. Short description of operation.
+   * Output only. Short description of operation.
    * 
* * string description = 12; @@ -455,7 +455,7 @@ public int getLabelsCount() { } /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -475,7 +475,7 @@ public java.util.Map getLabels() { } /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -486,7 +486,7 @@ public java.util.Map getLabelsMap() { } /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -502,7 +502,7 @@ public java.lang.String getLabelsOrDefault( } /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -523,7 +523,7 @@ public java.lang.String getLabelsOrThrow( private com.google.protobuf.LazyStringList warnings_; /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -534,7 +534,7 @@ public java.lang.String getLabelsOrThrow( } /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -544,7 +544,7 @@ public int getWarningsCount() { } /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -554,7 +554,7 @@ public java.lang.String getWarnings(int index) { } /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -1097,7 +1097,7 @@ public Builder mergeFrom( private java.lang.Object clusterName_ = ""; /** *
-     * Output-only. Name of the cluster for the operation.
+     * Output only. Name of the cluster for the operation.
      * 
* * string cluster_name = 7; @@ -1116,7 +1116,7 @@ public java.lang.String getClusterName() { } /** *
-     * Output-only. Name of the cluster for the operation.
+     * Output only. Name of the cluster for the operation.
      * 
* * string cluster_name = 7; @@ -1136,7 +1136,7 @@ public java.lang.String getClusterName() { } /** *
-     * Output-only. Name of the cluster for the operation.
+     * Output only. Name of the cluster for the operation.
      * 
* * string cluster_name = 7; @@ -1153,7 +1153,7 @@ public Builder setClusterName( } /** *
-     * Output-only. Name of the cluster for the operation.
+     * Output only. Name of the cluster for the operation.
      * 
* * string cluster_name = 7; @@ -1166,7 +1166,7 @@ public Builder clearClusterName() { } /** *
-     * Output-only. Name of the cluster for the operation.
+     * Output only. Name of the cluster for the operation.
      * 
* * string cluster_name = 7; @@ -1186,7 +1186,7 @@ public Builder setClusterNameBytes( private java.lang.Object clusterUuid_ = ""; /** *
-     * Output-only. Cluster UUID for the operation.
+     * Output only. Cluster UUID for the operation.
      * 
* * string cluster_uuid = 8; @@ -1205,7 +1205,7 @@ public java.lang.String getClusterUuid() { } /** *
-     * Output-only. Cluster UUID for the operation.
+     * Output only. Cluster UUID for the operation.
      * 
* * string cluster_uuid = 8; @@ -1225,7 +1225,7 @@ public java.lang.String getClusterUuid() { } /** *
-     * Output-only. Cluster UUID for the operation.
+     * Output only. Cluster UUID for the operation.
      * 
* * string cluster_uuid = 8; @@ -1242,7 +1242,7 @@ public Builder setClusterUuid( } /** *
-     * Output-only. Cluster UUID for the operation.
+     * Output only. Cluster UUID for the operation.
      * 
* * string cluster_uuid = 8; @@ -1255,7 +1255,7 @@ public Builder clearClusterUuid() { } /** *
-     * Output-only. Cluster UUID for the operation.
+     * Output only. Cluster UUID for the operation.
      * 
* * string cluster_uuid = 8; @@ -1277,7 +1277,7 @@ public Builder setClusterUuidBytes( com.google.cloud.dataproc.v1.ClusterOperationStatus, com.google.cloud.dataproc.v1.ClusterOperationStatus.Builder, com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder> statusBuilder_; /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1287,7 +1287,7 @@ public boolean hasStatus() { } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1301,7 +1301,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus getStatus() { } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1321,7 +1321,7 @@ public Builder setStatus(com.google.cloud.dataproc.v1.ClusterOperationStatus val } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1339,7 +1339,7 @@ public Builder setStatus( } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1361,7 +1361,7 @@ public Builder mergeStatus(com.google.cloud.dataproc.v1.ClusterOperationStatus v } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1379,7 +1379,7 @@ public Builder clearStatus() { } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1391,7 +1391,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.Builder getStatusBuil } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1406,7 +1406,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusOrB } /** *
-     * Output-only. Current operation status.
+     * Output only. Current operation status.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -1439,7 +1439,7 @@ private void ensureStatusHistoryIsMutable() { /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1453,7 +1453,7 @@ public java.util.List getSt } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1467,7 +1467,7 @@ public int getStatusHistoryCount() { } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1481,7 +1481,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus getStatusHistory(int } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1502,7 +1502,7 @@ public Builder setStatusHistory( } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1520,7 +1520,7 @@ public Builder setStatusHistory( } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1540,7 +1540,7 @@ public Builder addStatusHistory(com.google.cloud.dataproc.v1.ClusterOperationSta } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1561,7 +1561,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1579,7 +1579,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1597,7 +1597,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1616,7 +1616,7 @@ public Builder addAllStatusHistory( } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1633,7 +1633,7 @@ public Builder clearStatusHistory() { } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1650,7 +1650,7 @@ public Builder removeStatusHistory(int index) { } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1661,7 +1661,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.Builder getStatusHist } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1675,7 +1675,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHis } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1690,7 +1690,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHis } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1701,7 +1701,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.Builder addStatusHist } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1713,7 +1713,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.Builder addStatusHist } /** *
-     * Output-only. The previous operation status.
+     * Output only. The previous operation status.
      * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -1740,7 +1740,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.Builder addStatusHist private java.lang.Object operationType_ = ""; /** *
-     * Output-only. The operation type.
+     * Output only. The operation type.
      * 
* * string operation_type = 11; @@ -1759,7 +1759,7 @@ public java.lang.String getOperationType() { } /** *
-     * Output-only. The operation type.
+     * Output only. The operation type.
      * 
* * string operation_type = 11; @@ -1779,7 +1779,7 @@ public java.lang.String getOperationType() { } /** *
-     * Output-only. The operation type.
+     * Output only. The operation type.
      * 
* * string operation_type = 11; @@ -1796,7 +1796,7 @@ public Builder setOperationType( } /** *
-     * Output-only. The operation type.
+     * Output only. The operation type.
      * 
* * string operation_type = 11; @@ -1809,7 +1809,7 @@ public Builder clearOperationType() { } /** *
-     * Output-only. The operation type.
+     * Output only. The operation type.
      * 
* * string operation_type = 11; @@ -1829,7 +1829,7 @@ public Builder setOperationTypeBytes( private java.lang.Object description_ = ""; /** *
-     * Output-only. Short description of operation.
+     * Output only. Short description of operation.
      * 
* * string description = 12; @@ -1848,7 +1848,7 @@ public java.lang.String getDescription() { } /** *
-     * Output-only. Short description of operation.
+     * Output only. Short description of operation.
      * 
* * string description = 12; @@ -1868,7 +1868,7 @@ public java.lang.String getDescription() { } /** *
-     * Output-only. Short description of operation.
+     * Output only. Short description of operation.
      * 
* * string description = 12; @@ -1885,7 +1885,7 @@ public Builder setDescription( } /** *
-     * Output-only. Short description of operation.
+     * Output only. Short description of operation.
      * 
* * string description = 12; @@ -1898,7 +1898,7 @@ public Builder clearDescription() { } /** *
-     * Output-only. Short description of operation.
+     * Output only. Short description of operation.
      * 
* * string description = 12; @@ -1943,7 +1943,7 @@ public int getLabelsCount() { } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -1963,7 +1963,7 @@ public java.util.Map getLabels() { } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -1974,7 +1974,7 @@ public java.util.Map getLabelsMap() { } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -1990,7 +1990,7 @@ public java.lang.String getLabelsOrDefault( } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -2014,7 +2014,7 @@ public Builder clearLabels() { } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -2037,7 +2037,7 @@ public Builder removeLabels( } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -2053,7 +2053,7 @@ public Builder putLabels( } /** *
-     * Output-only. Labels associated with the operation
+     * Output only. Labels associated with the operation
      * 
* * map<string, string> labels = 13; @@ -2075,7 +2075,7 @@ private void ensureWarningsIsMutable() { } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2086,7 +2086,7 @@ private void ensureWarningsIsMutable() { } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2096,7 +2096,7 @@ public int getWarningsCount() { } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2106,7 +2106,7 @@ public java.lang.String getWarnings(int index) { } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2117,7 +2117,7 @@ public java.lang.String getWarnings(int index) { } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2134,7 +2134,7 @@ public Builder setWarnings( } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2151,7 +2151,7 @@ public Builder addWarnings( } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2166,7 +2166,7 @@ public Builder addAllWarnings( } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; @@ -2179,7 +2179,7 @@ public Builder clearWarnings() { } /** *
-     * Output-only. Errors encountered during operation execution.
+     * Output only. Errors encountered during operation execution.
      * 
* * repeated string warnings = 14; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadataOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadataOrBuilder.java index 6041b66215d6..d009d7679f03 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadataOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationMetadataOrBuilder.java @@ -9,7 +9,7 @@ public interface ClusterOperationMetadataOrBuilder extends /** *
-   * Output-only. Name of the cluster for the operation.
+   * Output only. Name of the cluster for the operation.
    * 
* * string cluster_name = 7; @@ -17,7 +17,7 @@ public interface ClusterOperationMetadataOrBuilder extends java.lang.String getClusterName(); /** *
-   * Output-only. Name of the cluster for the operation.
+   * Output only. Name of the cluster for the operation.
    * 
* * string cluster_name = 7; @@ -27,7 +27,7 @@ public interface ClusterOperationMetadataOrBuilder extends /** *
-   * Output-only. Cluster UUID for the operation.
+   * Output only. Cluster UUID for the operation.
    * 
* * string cluster_uuid = 8; @@ -35,7 +35,7 @@ public interface ClusterOperationMetadataOrBuilder extends java.lang.String getClusterUuid(); /** *
-   * Output-only. Cluster UUID for the operation.
+   * Output only. Cluster UUID for the operation.
    * 
* * string cluster_uuid = 8; @@ -45,7 +45,7 @@ public interface ClusterOperationMetadataOrBuilder extends /** *
-   * Output-only. Current operation status.
+   * Output only. Current operation status.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -53,7 +53,7 @@ public interface ClusterOperationMetadataOrBuilder extends boolean hasStatus(); /** *
-   * Output-only. Current operation status.
+   * Output only. Current operation status.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -61,7 +61,7 @@ public interface ClusterOperationMetadataOrBuilder extends com.google.cloud.dataproc.v1.ClusterOperationStatus getStatus(); /** *
-   * Output-only. Current operation status.
+   * Output only. Current operation status.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus status = 9; @@ -70,7 +70,7 @@ public interface ClusterOperationMetadataOrBuilder extends /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -79,7 +79,7 @@ public interface ClusterOperationMetadataOrBuilder extends getStatusHistoryList(); /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -87,7 +87,7 @@ public interface ClusterOperationMetadataOrBuilder extends com.google.cloud.dataproc.v1.ClusterOperationStatus getStatusHistory(int index); /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -95,7 +95,7 @@ public interface ClusterOperationMetadataOrBuilder extends int getStatusHistoryCount(); /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -104,7 +104,7 @@ public interface ClusterOperationMetadataOrBuilder extends getStatusHistoryOrBuilderList(); /** *
-   * Output-only. The previous operation status.
+   * Output only. The previous operation status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterOperationStatus status_history = 10; @@ -114,7 +114,7 @@ com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHistoryOrB /** *
-   * Output-only. The operation type.
+   * Output only. The operation type.
    * 
* * string operation_type = 11; @@ -122,7 +122,7 @@ com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHistoryOrB java.lang.String getOperationType(); /** *
-   * Output-only. The operation type.
+   * Output only. The operation type.
    * 
* * string operation_type = 11; @@ -132,7 +132,7 @@ com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHistoryOrB /** *
-   * Output-only. Short description of operation.
+   * Output only. Short description of operation.
    * 
* * string description = 12; @@ -140,7 +140,7 @@ com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHistoryOrB java.lang.String getDescription(); /** *
-   * Output-only. Short description of operation.
+   * Output only. Short description of operation.
    * 
* * string description = 12; @@ -150,7 +150,7 @@ com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHistoryOrB /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -158,7 +158,7 @@ com.google.cloud.dataproc.v1.ClusterOperationStatusOrBuilder getStatusHistoryOrB int getLabelsCount(); /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -173,7 +173,7 @@ boolean containsLabels( getLabels(); /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -182,7 +182,7 @@ boolean containsLabels( getLabelsMap(); /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -193,7 +193,7 @@ java.lang.String getLabelsOrDefault( java.lang.String defaultValue); /** *
-   * Output-only. Labels associated with the operation
+   * Output only. Labels associated with the operation
    * 
* * map<string, string> labels = 13; @@ -204,7 +204,7 @@ java.lang.String getLabelsOrThrow( /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -213,7 +213,7 @@ java.lang.String getLabelsOrThrow( getWarningsList(); /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -221,7 +221,7 @@ java.lang.String getLabelsOrThrow( int getWarningsCount(); /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; @@ -229,7 +229,7 @@ java.lang.String getLabelsOrThrow( java.lang.String getWarnings(int index); /** *
-   * Output-only. Errors encountered during operation execution.
+   * Output only. Errors encountered during operation execution.
    * 
* * repeated string warnings = 14; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationOrBuilder.java new file mode 100644 index 000000000000..2254f92960c2 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationOrBuilder.java @@ -0,0 +1,54 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ClusterOperationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ClusterOperation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Output only. The id of the cluster operation.
+   * 
+ * + * string operation_id = 1; + */ + java.lang.String getOperationId(); + /** + *
+   * Output only. The id of the cluster operation.
+   * 
+ * + * string operation_id = 1; + */ + com.google.protobuf.ByteString + getOperationIdBytes(); + + /** + *
+   * Output only. Error, if operation failed.
+   * 
+ * + * string error = 2; + */ + java.lang.String getError(); + /** + *
+   * Output only. Error, if operation failed.
+   * 
+ * + * string error = 2; + */ + com.google.protobuf.ByteString + getErrorBytes(); + + /** + *
+   * Output only. Indicates the operation is done.
+   * 
+ * + * bool done = 3; + */ + boolean getDone(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatus.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatus.java index 2891ea1d9fce..3843e664f43e 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatus.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatus.java @@ -268,7 +268,7 @@ private State(int value) { private int state_; /** *
-   * Output-only. A message containing the operation state.
+   * Output only. A message containing the operation state.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -278,7 +278,7 @@ public int getStateValue() { } /** *
-   * Output-only. A message containing the operation state.
+   * Output only. A message containing the operation state.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -293,7 +293,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.State getState() { private volatile java.lang.Object innerState_; /** *
-   * Output-only. A message containing the detailed operation state.
+   * Output only. A message containing the detailed operation state.
    * 
* * string inner_state = 2; @@ -312,7 +312,7 @@ public java.lang.String getInnerState() { } /** *
-   * Output-only. A message containing the detailed operation state.
+   * Output only. A message containing the detailed operation state.
    * 
* * string inner_state = 2; @@ -335,7 +335,7 @@ public java.lang.String getInnerState() { private volatile java.lang.Object details_; /** *
-   * Output-only.A message containing any operation metadata details.
+   * Output only. A message containing any operation metadata details.
    * 
* * string details = 3; @@ -354,7 +354,7 @@ public java.lang.String getDetails() { } /** *
-   * Output-only.A message containing any operation metadata details.
+   * Output only. A message containing any operation metadata details.
    * 
* * string details = 3; @@ -377,7 +377,7 @@ public java.lang.String getDetails() { private com.google.protobuf.Timestamp stateStartTime_; /** *
-   * Output-only. The time this state was entered.
+   * Output only. The time this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -387,7 +387,7 @@ public boolean hasStateStartTime() { } /** *
-   * Output-only. The time this state was entered.
+   * Output only. The time this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -397,7 +397,7 @@ public com.google.protobuf.Timestamp getStateStartTime() { } /** *
-   * Output-only. The time this state was entered.
+   * Output only. The time this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -779,7 +779,7 @@ public Builder mergeFrom( private int state_ = 0; /** *
-     * Output-only. A message containing the operation state.
+     * Output only. A message containing the operation state.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -789,7 +789,7 @@ public int getStateValue() { } /** *
-     * Output-only. A message containing the operation state.
+     * Output only. A message containing the operation state.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -801,7 +801,7 @@ public Builder setStateValue(int value) { } /** *
-     * Output-only. A message containing the operation state.
+     * Output only. A message containing the operation state.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -813,7 +813,7 @@ public com.google.cloud.dataproc.v1.ClusterOperationStatus.State getState() { } /** *
-     * Output-only. A message containing the operation state.
+     * Output only. A message containing the operation state.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -829,7 +829,7 @@ public Builder setState(com.google.cloud.dataproc.v1.ClusterOperationStatus.Stat } /** *
-     * Output-only. A message containing the operation state.
+     * Output only. A message containing the operation state.
      * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -844,7 +844,7 @@ public Builder clearState() { private java.lang.Object innerState_ = ""; /** *
-     * Output-only. A message containing the detailed operation state.
+     * Output only. A message containing the detailed operation state.
      * 
* * string inner_state = 2; @@ -863,7 +863,7 @@ public java.lang.String getInnerState() { } /** *
-     * Output-only. A message containing the detailed operation state.
+     * Output only. A message containing the detailed operation state.
      * 
* * string inner_state = 2; @@ -883,7 +883,7 @@ public java.lang.String getInnerState() { } /** *
-     * Output-only. A message containing the detailed operation state.
+     * Output only. A message containing the detailed operation state.
      * 
* * string inner_state = 2; @@ -900,7 +900,7 @@ public Builder setInnerState( } /** *
-     * Output-only. A message containing the detailed operation state.
+     * Output only. A message containing the detailed operation state.
      * 
* * string inner_state = 2; @@ -913,7 +913,7 @@ public Builder clearInnerState() { } /** *
-     * Output-only. A message containing the detailed operation state.
+     * Output only. A message containing the detailed operation state.
      * 
* * string inner_state = 2; @@ -933,7 +933,7 @@ public Builder setInnerStateBytes( private java.lang.Object details_ = ""; /** *
-     * Output-only.A message containing any operation metadata details.
+     * Output only. A message containing any operation metadata details.
      * 
* * string details = 3; @@ -952,7 +952,7 @@ public java.lang.String getDetails() { } /** *
-     * Output-only.A message containing any operation metadata details.
+     * Output only. A message containing any operation metadata details.
      * 
* * string details = 3; @@ -972,7 +972,7 @@ public java.lang.String getDetails() { } /** *
-     * Output-only.A message containing any operation metadata details.
+     * Output only. A message containing any operation metadata details.
      * 
* * string details = 3; @@ -989,7 +989,7 @@ public Builder setDetails( } /** *
-     * Output-only.A message containing any operation metadata details.
+     * Output only. A message containing any operation metadata details.
      * 
* * string details = 3; @@ -1002,7 +1002,7 @@ public Builder clearDetails() { } /** *
-     * Output-only.A message containing any operation metadata details.
+     * Output only. A message containing any operation metadata details.
      * 
* * string details = 3; @@ -1024,7 +1024,7 @@ public Builder setDetailsBytes( com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> stateStartTimeBuilder_; /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1034,7 +1034,7 @@ public boolean hasStateStartTime() { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1048,7 +1048,7 @@ public com.google.protobuf.Timestamp getStateStartTime() { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1068,7 +1068,7 @@ public Builder setStateStartTime(com.google.protobuf.Timestamp value) { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1086,7 +1086,7 @@ public Builder setStateStartTime( } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1108,7 +1108,7 @@ public Builder mergeStateStartTime(com.google.protobuf.Timestamp value) { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1126,7 +1126,7 @@ public Builder clearStateStartTime() { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1138,7 +1138,7 @@ public com.google.protobuf.Timestamp.Builder getStateStartTimeBuilder() { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -1153,7 +1153,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { } /** *
-     * Output-only. The time this state was entered.
+     * Output only. The time this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 4; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatusOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatusOrBuilder.java index 59fda28d8c64..8c5204cc65c5 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatusOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOperationStatusOrBuilder.java @@ -9,7 +9,7 @@ public interface ClusterOperationStatusOrBuilder extends /** *
-   * Output-only. A message containing the operation state.
+   * Output only. A message containing the operation state.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -17,7 +17,7 @@ public interface ClusterOperationStatusOrBuilder extends int getStateValue(); /** *
-   * Output-only. A message containing the operation state.
+   * Output only. A message containing the operation state.
    * 
* * .google.cloud.dataproc.v1.ClusterOperationStatus.State state = 1; @@ -26,7 +26,7 @@ public interface ClusterOperationStatusOrBuilder extends /** *
-   * Output-only. A message containing the detailed operation state.
+   * Output only. A message containing the detailed operation state.
    * 
* * string inner_state = 2; @@ -34,7 +34,7 @@ public interface ClusterOperationStatusOrBuilder extends java.lang.String getInnerState(); /** *
-   * Output-only. A message containing the detailed operation state.
+   * Output only. A message containing the detailed operation state.
    * 
* * string inner_state = 2; @@ -44,7 +44,7 @@ public interface ClusterOperationStatusOrBuilder extends /** *
-   * Output-only.A message containing any operation metadata details.
+   * Output only. A message containing any operation metadata details.
    * 
* * string details = 3; @@ -52,7 +52,7 @@ public interface ClusterOperationStatusOrBuilder extends java.lang.String getDetails(); /** *
-   * Output-only.A message containing any operation metadata details.
+   * Output only. A message containing any operation metadata details.
    * 
* * string details = 3; @@ -62,7 +62,7 @@ public interface ClusterOperationStatusOrBuilder extends /** *
-   * Output-only. The time this state was entered.
+   * Output only. The time this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -70,7 +70,7 @@ public interface ClusterOperationStatusOrBuilder extends boolean hasStateStartTime(); /** *
-   * Output-only. The time this state was entered.
+   * Output only. The time this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 4; @@ -78,7 +78,7 @@ public interface ClusterOperationStatusOrBuilder extends com.google.protobuf.Timestamp getStateStartTime(); /** *
-   * Output-only. The time this state was entered.
+   * Output only. The time this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 4; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOrBuilder.java index 683f6875c213..97b1cedd26ec 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterOrBuilder.java @@ -154,7 +154,7 @@ java.lang.String getLabelsOrThrow( /** *
-   * Output-only. Cluster status.
+   * Output only. Cluster status.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -162,7 +162,7 @@ java.lang.String getLabelsOrThrow( boolean hasStatus(); /** *
-   * Output-only. Cluster status.
+   * Output only. Cluster status.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -170,7 +170,7 @@ java.lang.String getLabelsOrThrow( com.google.cloud.dataproc.v1.ClusterStatus getStatus(); /** *
-   * Output-only. Cluster status.
+   * Output only. Cluster status.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus status = 4; @@ -179,7 +179,7 @@ java.lang.String getLabelsOrThrow( /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -188,7 +188,7 @@ java.lang.String getLabelsOrThrow( getStatusHistoryList(); /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -196,7 +196,7 @@ java.lang.String getLabelsOrThrow( com.google.cloud.dataproc.v1.ClusterStatus getStatusHistory(int index); /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -204,7 +204,7 @@ java.lang.String getLabelsOrThrow( int getStatusHistoryCount(); /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -213,7 +213,7 @@ java.lang.String getLabelsOrThrow( getStatusHistoryOrBuilderList(); /** *
-   * Output-only. The previous cluster status.
+   * Output only. The previous cluster status.
    * 
* * repeated .google.cloud.dataproc.v1.ClusterStatus status_history = 7; @@ -223,7 +223,7 @@ com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusHistoryOrBuilder( /** *
-   * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+   * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
    * generates this value when it creates the cluster.
    * 
* @@ -232,7 +232,7 @@ com.google.cloud.dataproc.v1.ClusterStatusOrBuilder getStatusHistoryOrBuilder( java.lang.String getClusterUuid(); /** *
-   * Output-only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
+   * Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc
    * generates this value when it creates the cluster.
    * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelector.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelector.java new file mode 100644 index 000000000000..2e0b5c09fa0e --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelector.java @@ -0,0 +1,931 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A selector that chooses target cluster for jobs based on metadata.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ClusterSelector} + */ +public final class ClusterSelector extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ClusterSelector) + ClusterSelectorOrBuilder { +private static final long serialVersionUID = 0L; + // Use ClusterSelector.newBuilder() to construct. + private ClusterSelector(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ClusterSelector() { + zone_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ClusterSelector( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + zone_ = s; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + clusterLabels_ = com.google.protobuf.MapField.newMapField( + ClusterLabelsDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000002; + } + com.google.protobuf.MapEntry + clusterLabels__ = input.readMessage( + ClusterLabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + clusterLabels_.getMutableMap().put( + clusterLabels__.getKey(), clusterLabels__.getValue()); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 2: + return internalGetClusterLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterSelector_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ClusterSelector.class, com.google.cloud.dataproc.v1.ClusterSelector.Builder.class); + } + + private int bitField0_; + public static final int ZONE_FIELD_NUMBER = 1; + private volatile java.lang.Object zone_; + /** + *
+   * Optional. The zone where workflow process executes. This parameter does not
+   * affect the selection of the cluster.
+   * If unspecified, the zone of the first cluster matching the selector
+   * is used.
+   * 
+ * + * string zone = 1; + */ + public java.lang.String getZone() { + java.lang.Object ref = zone_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + zone_ = s; + return s; + } + } + /** + *
+   * Optional. The zone where workflow process executes. This parameter does not
+   * affect the selection of the cluster.
+   * If unspecified, the zone of the first cluster matching the selector
+   * is used.
+   * 
+ * + * string zone = 1; + */ + public com.google.protobuf.ByteString + getZoneBytes() { + java.lang.Object ref = zone_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + zone_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CLUSTER_LABELS_FIELD_NUMBER = 2; + private static final class ClusterLabelsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterSelector_ClusterLabelsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> clusterLabels_; + private com.google.protobuf.MapField + internalGetClusterLabels() { + if (clusterLabels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ClusterLabelsDefaultEntryHolder.defaultEntry); + } + return clusterLabels_; + } + + public int getClusterLabelsCount() { + return internalGetClusterLabels().getMap().size(); + } + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public boolean containsClusterLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetClusterLabels().getMap().containsKey(key); + } + /** + * Use {@link #getClusterLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getClusterLabels() { + return getClusterLabelsMap(); + } + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public java.util.Map getClusterLabelsMap() { + return internalGetClusterLabels().getMap(); + } + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public java.lang.String getClusterLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetClusterLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public java.lang.String getClusterLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetClusterLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getZoneBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, zone_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetClusterLabels(), + ClusterLabelsDefaultEntryHolder.defaultEntry, + 2); + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getZoneBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, zone_); + } + for (java.util.Map.Entry entry + : internalGetClusterLabels().getMap().entrySet()) { + com.google.protobuf.MapEntry + clusterLabels__ = ClusterLabelsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, clusterLabels__); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ClusterSelector)) { + return 
super.equals(obj); + } + com.google.cloud.dataproc.v1.ClusterSelector other = (com.google.cloud.dataproc.v1.ClusterSelector) obj; + + boolean result = true; + result = result && getZone() + .equals(other.getZone()); + result = result && internalGetClusterLabels().equals( + other.internalGetClusterLabels()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ZONE_FIELD_NUMBER; + hash = (53 * hash) + getZone().hashCode(); + if (!internalGetClusterLabels().getMap().isEmpty()) { + hash = (37 * hash) + CLUSTER_LABELS_FIELD_NUMBER; + hash = (53 * hash) + internalGetClusterLabels().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(byte[] data) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.ClusterSelector prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A selector that chooses target cluster for jobs based on metadata.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ClusterSelector} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ClusterSelector) + com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 2: + return internalGetClusterLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 2: + return internalGetMutableClusterLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterSelector_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ClusterSelector.class, com.google.cloud.dataproc.v1.ClusterSelector.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ClusterSelector.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + 
super.clear(); + zone_ = ""; + + internalGetMutableClusterLabels().clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterSelector getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterSelector build() { + com.google.cloud.dataproc.v1.ClusterSelector result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterSelector buildPartial() { + com.google.cloud.dataproc.v1.ClusterSelector result = new com.google.cloud.dataproc.v1.ClusterSelector(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.zone_ = zone_; + result.clusterLabels_ = internalGetClusterLabels(); + result.clusterLabels_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return 
(Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ClusterSelector) { + return mergeFrom((com.google.cloud.dataproc.v1.ClusterSelector)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ClusterSelector other) { + if (other == com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance()) return this; + if (!other.getZone().isEmpty()) { + zone_ = other.zone_; + onChanged(); + } + internalGetMutableClusterLabels().mergeFrom( + other.internalGetClusterLabels()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ClusterSelector parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ClusterSelector) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object zone_ = ""; + /** + *
+     * Optional. The zone where workflow process executes. This parameter does not
+     * affect the selection of the cluster.
+     * If unspecified, the zone of the first cluster matching the selector
+     * is used.
+     * 
+ * + * string zone = 1; + */ + public java.lang.String getZone() { + java.lang.Object ref = zone_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + zone_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. The zone where workflow process executes. This parameter does not
+     * affect the selection of the cluster.
+     * If unspecified, the zone of the first cluster matching the selector
+     * is used.
+     * 
+ * + * string zone = 1; + */ + public com.google.protobuf.ByteString + getZoneBytes() { + java.lang.Object ref = zone_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + zone_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. The zone where workflow process executes. This parameter does not
+     * affect the selection of the cluster.
+     * If unspecified, the zone of the first cluster matching the selector
+     * is used.
+     * 
+ * + * string zone = 1; + */ + public Builder setZone( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + zone_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The zone where workflow process executes. This parameter does not
+     * affect the selection of the cluster.
+     * If unspecified, the zone of the first cluster matching the selector
+     * is used.
+     * 
+ * + * string zone = 1; + */ + public Builder clearZone() { + + zone_ = getDefaultInstance().getZone(); + onChanged(); + return this; + } + /** + *
+     * Optional. The zone where workflow process executes. This parameter does not
+     * affect the selection of the cluster.
+     * If unspecified, the zone of the first cluster matching the selector
+     * is used.
+     * 
+ * + * string zone = 1; + */ + public Builder setZoneBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + zone_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> clusterLabels_; + private com.google.protobuf.MapField + internalGetClusterLabels() { + if (clusterLabels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ClusterLabelsDefaultEntryHolder.defaultEntry); + } + return clusterLabels_; + } + private com.google.protobuf.MapField + internalGetMutableClusterLabels() { + onChanged();; + if (clusterLabels_ == null) { + clusterLabels_ = com.google.protobuf.MapField.newMapField( + ClusterLabelsDefaultEntryHolder.defaultEntry); + } + if (!clusterLabels_.isMutable()) { + clusterLabels_ = clusterLabels_.copy(); + } + return clusterLabels_; + } + + public int getClusterLabelsCount() { + return internalGetClusterLabels().getMap().size(); + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public boolean containsClusterLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetClusterLabels().getMap().containsKey(key); + } + /** + * Use {@link #getClusterLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getClusterLabels() { + return getClusterLabelsMap(); + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public java.util.Map getClusterLabelsMap() { + return internalGetClusterLabels().getMap(); + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public java.lang.String getClusterLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetClusterLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public java.lang.String getClusterLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetClusterLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearClusterLabels() { + internalGetMutableClusterLabels().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public Builder removeClusterLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableClusterLabels().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableClusterLabels() { + return internalGetMutableClusterLabels().getMutableMap(); + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + public Builder putClusterLabels( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableClusterLabels().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Required. The cluster labels. Cluster must have all labels
+     * to match.
+     * 
+ * + * map<string, string> cluster_labels = 2; + */ + + public Builder putAllClusterLabels( + java.util.Map values) { + internalGetMutableClusterLabels().getMutableMap() + .putAll(values); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ClusterSelector) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterSelector) + private static final com.google.cloud.dataproc.v1.ClusterSelector DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ClusterSelector(); + } + + public static com.google.cloud.dataproc.v1.ClusterSelector getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ClusterSelector parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterSelector(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ClusterSelector getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelectorOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelectorOrBuilder.java new file mode 100644 index 000000000000..b88f7b9023fa --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterSelectorOrBuilder.java @@ -0,0 +1,92 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ClusterSelectorOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ClusterSelector) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Optional. The zone where workflow process executes. This parameter does not
+   * affect the selection of the cluster.
+   * If unspecified, the zone of the first cluster matching the selector
+   * is used.
+   * 
+ * + * string zone = 1; + */ + java.lang.String getZone(); + /** + *
+   * Optional. The zone where workflow process executes. This parameter does not
+   * affect the selection of the cluster.
+   * If unspecified, the zone of the first cluster matching the selector
+   * is used.
+   * 
+ * + * string zone = 1; + */ + com.google.protobuf.ByteString + getZoneBytes(); + + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + int getClusterLabelsCount(); + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + boolean containsClusterLabels( + java.lang.String key); + /** + * Use {@link #getClusterLabelsMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getClusterLabels(); + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + java.util.Map + getClusterLabelsMap(); + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + + java.lang.String getClusterLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Required. The cluster labels. Cluster must have all labels
+   * to match.
+   * 
+ * + * map<string, string> cluster_labels = 2; + */ + + java.lang.String getClusterLabelsOrThrow( + java.lang.String key); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatus.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatus.java index 6ea4de8f56bb..49efa356aec3 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatus.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatus.java @@ -299,11 +299,19 @@ private State(int value) { } /** + *
+   * The cluster substate.
+   * 
+ * * Protobuf enum {@code google.cloud.dataproc.v1.ClusterStatus.Substate} */ public enum Substate implements com.google.protobuf.ProtocolMessageEnum { /** + *
+     * The cluster substate is unknown.
+     * 
+ * * UNSPECIFIED = 0; */ UNSPECIFIED(0), @@ -332,6 +340,10 @@ public enum Substate ; /** + *
+     * The cluster substate is unknown.
+     * 
+ * * UNSPECIFIED = 0; */ public static final int UNSPECIFIED_VALUE = 0; @@ -435,7 +447,7 @@ private Substate(int value) { private int state_; /** *
-   * Output-only. The cluster's state.
+   * Output only. The cluster's state.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -445,7 +457,7 @@ public int getStateValue() { } /** *
-   * Output-only. The cluster's state.
+   * Output only. The cluster's state.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -460,7 +472,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.State getState() { private volatile java.lang.Object detail_; /** *
-   * Output-only. Optional details of cluster's state.
+   * Output only. Optional details of cluster's state.
    * 
* * string detail = 2; @@ -479,7 +491,7 @@ public java.lang.String getDetail() { } /** *
-   * Output-only. Optional details of cluster's state.
+   * Output only. Optional details of cluster's state.
    * 
* * string detail = 2; @@ -502,7 +514,7 @@ public java.lang.String getDetail() { private com.google.protobuf.Timestamp stateStartTime_; /** *
-   * Output-only. Time when this state was entered.
+   * Output only. Time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -512,7 +524,7 @@ public boolean hasStateStartTime() { } /** *
-   * Output-only. Time when this state was entered.
+   * Output only. Time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -522,7 +534,7 @@ public com.google.protobuf.Timestamp getStateStartTime() { } /** *
-   * Output-only. Time when this state was entered.
+   * Output only. Time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -535,7 +547,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { private int substate_; /** *
-   * Output-only. Additional state information that includes
+   * Output only. Additional state information that includes
    * status reported by the agent.
    * 
* @@ -546,7 +558,7 @@ public int getSubstateValue() { } /** *
-   * Output-only. Additional state information that includes
+   * Output only. Additional state information that includes
    * status reported by the agent.
    * 
* @@ -930,7 +942,7 @@ public Builder mergeFrom( private int state_ = 0; /** *
-     * Output-only. The cluster's state.
+     * Output only. The cluster's state.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -940,7 +952,7 @@ public int getStateValue() { } /** *
-     * Output-only. The cluster's state.
+     * Output only. The cluster's state.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -952,7 +964,7 @@ public Builder setStateValue(int value) { } /** *
-     * Output-only. The cluster's state.
+     * Output only. The cluster's state.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -964,7 +976,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.State getState() { } /** *
-     * Output-only. The cluster's state.
+     * Output only. The cluster's state.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -980,7 +992,7 @@ public Builder setState(com.google.cloud.dataproc.v1.ClusterStatus.State value) } /** *
-     * Output-only. The cluster's state.
+     * Output only. The cluster's state.
      * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -995,7 +1007,7 @@ public Builder clearState() { private java.lang.Object detail_ = ""; /** *
-     * Output-only. Optional details of cluster's state.
+     * Output only. Optional details of cluster's state.
      * 
* * string detail = 2; @@ -1014,7 +1026,7 @@ public java.lang.String getDetail() { } /** *
-     * Output-only. Optional details of cluster's state.
+     * Output only. Optional details of cluster's state.
      * 
* * string detail = 2; @@ -1034,7 +1046,7 @@ public java.lang.String getDetail() { } /** *
-     * Output-only. Optional details of cluster's state.
+     * Output only. Optional details of cluster's state.
      * 
* * string detail = 2; @@ -1051,7 +1063,7 @@ public Builder setDetail( } /** *
-     * Output-only. Optional details of cluster's state.
+     * Output only. Optional details of cluster's state.
      * 
* * string detail = 2; @@ -1064,7 +1076,7 @@ public Builder clearDetail() { } /** *
-     * Output-only. Optional details of cluster's state.
+     * Output only. Optional details of cluster's state.
      * 
* * string detail = 2; @@ -1086,7 +1098,7 @@ public Builder setDetailBytes( com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> stateStartTimeBuilder_; /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1096,7 +1108,7 @@ public boolean hasStateStartTime() { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1110,7 +1122,7 @@ public com.google.protobuf.Timestamp getStateStartTime() { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1130,7 +1142,7 @@ public Builder setStateStartTime(com.google.protobuf.Timestamp value) { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1148,7 +1160,7 @@ public Builder setStateStartTime( } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1170,7 +1182,7 @@ public Builder mergeStateStartTime(com.google.protobuf.Timestamp value) { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1188,7 +1200,7 @@ public Builder clearStateStartTime() { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1200,7 +1212,7 @@ public com.google.protobuf.Timestamp.Builder getStateStartTimeBuilder() { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1215,7 +1227,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { } /** *
-     * Output-only. Time when this state was entered.
+     * Output only. Time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -1237,7 +1249,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { private int substate_ = 0; /** *
-     * Output-only. Additional state information that includes
+     * Output only. Additional state information that includes
      * status reported by the agent.
      * 
* @@ -1248,7 +1260,7 @@ public int getSubstateValue() { } /** *
-     * Output-only. Additional state information that includes
+     * Output only. Additional state information that includes
      * status reported by the agent.
      * 
* @@ -1261,7 +1273,7 @@ public Builder setSubstateValue(int value) { } /** *
-     * Output-only. Additional state information that includes
+     * Output only. Additional state information that includes
      * status reported by the agent.
      * 
* @@ -1274,7 +1286,7 @@ public com.google.cloud.dataproc.v1.ClusterStatus.Substate getSubstate() { } /** *
-     * Output-only. Additional state information that includes
+     * Output only. Additional state information that includes
      * status reported by the agent.
      * 
* @@ -1291,7 +1303,7 @@ public Builder setSubstate(com.google.cloud.dataproc.v1.ClusterStatus.Substate v } /** *
-     * Output-only. Additional state information that includes
+     * Output only. Additional state information that includes
      * status reported by the agent.
      * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatusOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatusOrBuilder.java index 759653ddd83b..2fb4073d747d 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatusOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClusterStatusOrBuilder.java @@ -9,7 +9,7 @@ public interface ClusterStatusOrBuilder extends /** *
-   * Output-only. The cluster's state.
+   * Output only. The cluster's state.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -17,7 +17,7 @@ public interface ClusterStatusOrBuilder extends int getStateValue(); /** *
-   * Output-only. The cluster's state.
+   * Output only. The cluster's state.
    * 
* * .google.cloud.dataproc.v1.ClusterStatus.State state = 1; @@ -26,7 +26,7 @@ public interface ClusterStatusOrBuilder extends /** *
-   * Output-only. Optional details of cluster's state.
+   * Output only. Optional details of cluster's state.
    * 
* * string detail = 2; @@ -34,7 +34,7 @@ public interface ClusterStatusOrBuilder extends java.lang.String getDetail(); /** *
-   * Output-only. Optional details of cluster's state.
+   * Output only. Optional details of cluster's state.
    * 
* * string detail = 2; @@ -44,7 +44,7 @@ public interface ClusterStatusOrBuilder extends /** *
-   * Output-only. Time when this state was entered.
+   * Output only. Time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -52,7 +52,7 @@ public interface ClusterStatusOrBuilder extends boolean hasStateStartTime(); /** *
-   * Output-only. Time when this state was entered.
+   * Output only. Time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -60,7 +60,7 @@ public interface ClusterStatusOrBuilder extends com.google.protobuf.Timestamp getStateStartTime(); /** *
-   * Output-only. Time when this state was entered.
+   * Output only. Time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 3; @@ -69,7 +69,7 @@ public interface ClusterStatusOrBuilder extends /** *
-   * Output-only. Additional state information that includes
+   * Output only. Additional state information that includes
    * status reported by the agent.
    * 
* @@ -78,7 +78,7 @@ public interface ClusterStatusOrBuilder extends int getSubstateValue(); /** *
-   * Output-only. Additional state information that includes
+   * Output only. Additional state information that includes
    * status reported by the agent.
    * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClustersProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClustersProto.java index 77bdf564b851..7b6ce95ea86f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClustersProto.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ClustersProto.java @@ -29,6 +29,11 @@ public static void registerAllExtensions( static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_dataproc_v1_ClusterConfig_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_EncryptionConfig_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_EncryptionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_dataproc_v1_GceClusterConfig_descriptor; static final @@ -145,129 +150,136 @@ public static void registerAllExtensions( java.lang.String[] descriptorData = { "\n\'google/cloud/dataproc/v1/clusters.prot" + "o\022\030google.cloud.dataproc.v1\032\034google/api/" + - "annotations.proto\032)google/cloud/dataproc" + - "/v1/operations.proto\032#google/longrunning" + - "/operations.proto\032\036google/protobuf/durat" + - "ion.proto\032 google/protobuf/field_mask.pr" + - "oto\032\037google/protobuf/timestamp.proto\"\245\003\n" + - "\007Cluster\022\022\n\nproject_id\030\001 \001(\t\022\024\n\014cluster_" + - "name\030\002 \001(\t\0227\n\006config\030\003 \001(\0132\'.google.clou" + - "d.dataproc.v1.ClusterConfig\022=\n\006labels\030\010 " + - "\003(\0132-.google.cloud.dataproc.v1.Cluster.L" + - "abelsEntry\0227\n\006status\030\004 \001(\0132\'.google.clou" + - "d.dataproc.v1.ClusterStatus\022?\n\016status_hi" + - 
"story\030\007 \003(\0132\'.google.cloud.dataproc.v1.C" + - "lusterStatus\022\024\n\014cluster_uuid\030\006 \001(\t\0229\n\007me" + - "trics\030\t \001(\0132(.google.cloud.dataproc.v1.C" + - "lusterMetrics\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(" + - "\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\341\003\n\rClusterConfig\022\025" + - "\n\rconfig_bucket\030\001 \001(\t\022F\n\022gce_cluster_con" + - "fig\030\010 \001(\0132*.google.cloud.dataproc.v1.Gce" + - "ClusterConfig\022D\n\rmaster_config\030\t \001(\0132-.g" + - "oogle.cloud.dataproc.v1.InstanceGroupCon" + - "fig\022D\n\rworker_config\030\n \001(\0132-.google.clou" + - "d.dataproc.v1.InstanceGroupConfig\022N\n\027sec" + - "ondary_worker_config\030\014 \001(\0132-.google.clou" + - "d.dataproc.v1.InstanceGroupConfig\022A\n\017sof" + - "tware_config\030\r \001(\0132(.google.cloud.datapr" + - "oc.v1.SoftwareConfig\022R\n\026initialization_a" + - "ctions\030\013 \003(\01322.google.cloud.dataproc.v1." + - "NodeInitializationAction\"\257\002\n\020GceClusterC" + - "onfig\022\020\n\010zone_uri\030\001 \001(\t\022\023\n\013network_uri\030\002" + - " \001(\t\022\026\n\016subnetwork_uri\030\006 \001(\t\022\030\n\020internal" + - "_ip_only\030\007 \001(\010\022\027\n\017service_account\030\010 \001(\t\022" + - "\036\n\026service_account_scopes\030\003 \003(\t\022\014\n\004tags\030" + - "\004 \003(\t\022J\n\010metadata\030\005 \003(\01328.google.cloud.d" + - "ataproc.v1.GceClusterConfig.MetadataEntr" + - "y\032/\n\rMetadataEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value" + - "\030\002 \001(\t:\0028\001\"\323\002\n\023InstanceGroupConfig\022\025\n\rnu" + - "m_instances\030\001 \001(\005\022\026\n\016instance_names\030\002 \003(" + - "\t\022\021\n\timage_uri\030\003 \001(\t\022\030\n\020machine_type_uri" + - "\030\004 \001(\t\0229\n\013disk_config\030\005 \001(\0132$.google.clo" + - "ud.dataproc.v1.DiskConfig\022\026\n\016is_preempti" + - "ble\030\006 \001(\010\022J\n\024managed_group_config\030\007 
\001(\0132" + - ",.google.cloud.dataproc.v1.ManagedGroupC" + - "onfig\022A\n\014accelerators\030\010 \003(\0132+.google.clo" + - "ud.dataproc.v1.AcceleratorConfig\"Y\n\022Mana" + - "gedGroupConfig\022\036\n\026instance_template_name" + - "\030\001 \001(\t\022#\n\033instance_group_manager_name\030\002 " + - "\001(\t\"L\n\021AcceleratorConfig\022\034\n\024accelerator_" + - "type_uri\030\001 \001(\t\022\031\n\021accelerator_count\030\002 \001(" + - "\005\"?\n\nDiskConfig\022\031\n\021boot_disk_size_gb\030\001 \001" + - "(\005\022\026\n\016num_local_ssds\030\002 \001(\005\"i\n\030NodeInitia" + - "lizationAction\022\027\n\017executable_file\030\001 \001(\t\022" + - "4\n\021execution_timeout\030\002 \001(\0132\031.google.prot" + - "obuf.Duration\"\355\002\n\rClusterStatus\022<\n\005state" + - "\030\001 \001(\0162-.google.cloud.dataproc.v1.Cluste" + - "rStatus.State\022\016\n\006detail\030\002 \001(\t\0224\n\020state_s" + - "tart_time\030\003 \001(\0132\032.google.protobuf.Timest" + - "amp\022B\n\010substate\030\004 \001(\01620.google.cloud.dat" + - "aproc.v1.ClusterStatus.Substate\"V\n\005State" + - "\022\013\n\007UNKNOWN\020\000\022\014\n\010CREATING\020\001\022\013\n\007RUNNING\020\002" + - "\022\t\n\005ERROR\020\003\022\014\n\010DELETING\020\004\022\014\n\010UPDATING\020\005\"" + - "<\n\010Substate\022\017\n\013UNSPECIFIED\020\000\022\r\n\tUNHEALTH" + - "Y\020\001\022\020\n\014STALE_STATUS\020\002\"\250\001\n\016SoftwareConfig" + - "\022\025\n\rimage_version\030\001 \001(\t\022L\n\nproperties\030\002 " + - "\003(\01328.google.cloud.dataproc.v1.SoftwareC" + - "onfig.PropertiesEntry\0321\n\017PropertiesEntry" + - "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\232\002\n\016Clu" + - "sterMetrics\022O\n\014hdfs_metrics\030\001 \003(\01329.goog" + - "le.cloud.dataproc.v1.ClusterMetrics.Hdfs" + - "MetricsEntry\022O\n\014yarn_metrics\030\002 \003(\01329.goo" + - "gle.cloud.dataproc.v1.ClusterMetrics.Yar" + - 
"nMetricsEntry\0322\n\020HdfsMetricsEntry\022\013\n\003key" + - "\030\001 \001(\t\022\r\n\005value\030\002 \001(\003:\0028\001\0322\n\020YarnMetrics" + - "Entry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\003:\0028\001\"n\n" + - "\024CreateClusterRequest\022\022\n\nproject_id\030\001 \001(" + - "\t\022\016\n\006region\030\003 \001(\t\0222\n\007cluster\030\002 \001(\0132!.goo" + - "gle.cloud.dataproc.v1.Cluster\"\265\001\n\024Update" + - "ClusterRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006re" + - "gion\030\005 \001(\t\022\024\n\014cluster_name\030\002 \001(\t\0222\n\007clus" + - "ter\030\003 \001(\0132!.google.cloud.dataproc.v1.Clu" + - "ster\022/\n\013update_mask\030\004 \001(\0132\032.google.proto" + - "buf.FieldMask\"P\n\024DeleteClusterRequest\022\022\n" + - "\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022\024\n\014clu" + - "ster_name\030\002 \001(\t\"M\n\021GetClusterRequest\022\022\n\n" + - "project_id\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022\024\n\014clus" + - "ter_name\030\002 \001(\t\"p\n\023ListClustersRequest\022\022\n" + - "\nproject_id\030\001 \001(\t\022\016\n\006region\030\004 \001(\t\022\016\n\006fil" + - "ter\030\005 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_tok" + - "en\030\003 \001(\t\"d\n\024ListClustersResponse\0223\n\010clus" + - "ters\030\001 \003(\0132!.google.cloud.dataproc.v1.Cl" + - "uster\022\027\n\017next_page_token\030\002 \001(\t\"R\n\026Diagno" + - "seClusterRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006" + - "region\030\003 \001(\t\022\024\n\014cluster_name\030\002 \001(\t\",\n\026Di" + - "agnoseClusterResults\022\022\n\noutput_uri\030\001 \001(\t" + - "2\262\010\n\021ClusterController\022\244\001\n\rCreateCluster" + - "\022..google.cloud.dataproc.v1.CreateCluste" + + "annotations.proto\032#google/longrunning/op" + + "erations.proto\032\036google/protobuf/duration" + + ".proto\032 google/protobuf/field_mask.proto" + + 
"\032\037google/protobuf/timestamp.proto\"\245\003\n\007Cl" + + "uster\022\022\n\nproject_id\030\001 \001(\t\022\024\n\014cluster_nam" + + "e\030\002 \001(\t\0227\n\006config\030\003 \001(\0132\'.google.cloud.d" + + "ataproc.v1.ClusterConfig\022=\n\006labels\030\010 \003(\013" + + "2-.google.cloud.dataproc.v1.Cluster.Labe" + + "lsEntry\0227\n\006status\030\004 \001(\0132\'.google.cloud.d" + + "ataproc.v1.ClusterStatus\022?\n\016status_histo" + + "ry\030\007 \003(\0132\'.google.cloud.dataproc.v1.Clus" + + "terStatus\022\024\n\014cluster_uuid\030\006 \001(\t\0229\n\007metri" + + "cs\030\t \001(\0132(.google.cloud.dataproc.v1.Clus" + + "terMetrics\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r" + + "\n\005value\030\002 \001(\t:\0028\001\"\250\004\n\rClusterConfig\022\025\n\rc" + + "onfig_bucket\030\001 \001(\t\022F\n\022gce_cluster_config" + + "\030\010 \001(\0132*.google.cloud.dataproc.v1.GceClu" + + "sterConfig\022D\n\rmaster_config\030\t \001(\0132-.goog" + + "le.cloud.dataproc.v1.InstanceGroupConfig" + + "\022D\n\rworker_config\030\n \001(\0132-.google.cloud.d" + + "ataproc.v1.InstanceGroupConfig\022N\n\027second" + + "ary_worker_config\030\014 \001(\0132-.google.cloud.d" + + "ataproc.v1.InstanceGroupConfig\022A\n\017softwa" + + "re_config\030\r \001(\0132(.google.cloud.dataproc." 
+ + "v1.SoftwareConfig\022R\n\026initialization_acti" + + "ons\030\013 \003(\01322.google.cloud.dataproc.v1.Nod" + + "eInitializationAction\022E\n\021encryption_conf" + + "ig\030\017 \001(\0132*.google.cloud.dataproc.v1.Encr" + + "yptionConfig\"/\n\020EncryptionConfig\022\033\n\023gce_" + + "pd_kms_key_name\030\001 \001(\t\"\257\002\n\020GceClusterConf" + + "ig\022\020\n\010zone_uri\030\001 \001(\t\022\023\n\013network_uri\030\002 \001(" + + "\t\022\026\n\016subnetwork_uri\030\006 \001(\t\022\030\n\020internal_ip" + + "_only\030\007 \001(\010\022\027\n\017service_account\030\010 \001(\t\022\036\n\026" + + "service_account_scopes\030\003 \003(\t\022\014\n\004tags\030\004 \003" + + "(\t\022J\n\010metadata\030\005 \003(\01328.google.cloud.data" + + "proc.v1.GceClusterConfig.MetadataEntry\032/" + + "\n\rMetadataEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 " + + "\001(\t:\0028\001\"\323\002\n\023InstanceGroupConfig\022\025\n\rnum_i" + + "nstances\030\001 \001(\005\022\026\n\016instance_names\030\002 \003(\t\022\021" + + "\n\timage_uri\030\003 \001(\t\022\030\n\020machine_type_uri\030\004 " + + "\001(\t\0229\n\013disk_config\030\005 \001(\0132$.google.cloud." + + "dataproc.v1.DiskConfig\022\026\n\016is_preemptible" + + "\030\006 \001(\010\022J\n\024managed_group_config\030\007 \001(\0132,.g" + + "oogle.cloud.dataproc.v1.ManagedGroupConf" + + "ig\022A\n\014accelerators\030\010 \003(\0132+.google.cloud." 
+ + "dataproc.v1.AcceleratorConfig\"Y\n\022Managed" + + "GroupConfig\022\036\n\026instance_template_name\030\001 " + + "\001(\t\022#\n\033instance_group_manager_name\030\002 \001(\t" + + "\"L\n\021AcceleratorConfig\022\034\n\024accelerator_typ" + + "e_uri\030\001 \001(\t\022\031\n\021accelerator_count\030\002 \001(\005\"W" + + "\n\nDiskConfig\022\026\n\016boot_disk_type\030\003 \001(\t\022\031\n\021" + + "boot_disk_size_gb\030\001 \001(\005\022\026\n\016num_local_ssd" + + "s\030\002 \001(\005\"i\n\030NodeInitializationAction\022\027\n\017e" + + "xecutable_file\030\001 \001(\t\0224\n\021execution_timeou" + + "t\030\002 \001(\0132\031.google.protobuf.Duration\"\355\002\n\rC" + + "lusterStatus\022<\n\005state\030\001 \001(\0162-.google.clo" + + "ud.dataproc.v1.ClusterStatus.State\022\016\n\006de" + + "tail\030\002 \001(\t\0224\n\020state_start_time\030\003 \001(\0132\032.g" + + "oogle.protobuf.Timestamp\022B\n\010substate\030\004 \001" + + "(\01620.google.cloud.dataproc.v1.ClusterSta" + + "tus.Substate\"V\n\005State\022\013\n\007UNKNOWN\020\000\022\014\n\010CR" + + "EATING\020\001\022\013\n\007RUNNING\020\002\022\t\n\005ERROR\020\003\022\014\n\010DELE" + + "TING\020\004\022\014\n\010UPDATING\020\005\"<\n\010Substate\022\017\n\013UNSP" + + "ECIFIED\020\000\022\r\n\tUNHEALTHY\020\001\022\020\n\014STALE_STATUS" + + "\020\002\"\250\001\n\016SoftwareConfig\022\025\n\rimage_version\030\001" + + " \001(\t\022L\n\nproperties\030\002 \003(\01328.google.cloud." + + "dataproc.v1.SoftwareConfig.PropertiesEnt" + + "ry\0321\n\017PropertiesEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va" + + "lue\030\002 \001(\t:\0028\001\"\232\002\n\016ClusterMetrics\022O\n\014hdfs" + + "_metrics\030\001 \003(\01329.google.cloud.dataproc.v" + + "1.ClusterMetrics.HdfsMetricsEntry\022O\n\014yar" + + "n_metrics\030\002 \003(\01329.google.cloud.dataproc." 
+ + "v1.ClusterMetrics.YarnMetricsEntry\0322\n\020Hd" + + "fsMetricsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001" + + "(\003:\0028\001\0322\n\020YarnMetricsEntry\022\013\n\003key\030\001 \001(\t\022" + + "\r\n\005value\030\002 \001(\003:\0028\001\"\202\001\n\024CreateClusterRequ" + + "est\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022" + + "2\n\007cluster\030\002 \001(\0132!.google.cloud.dataproc" + + ".v1.Cluster\022\022\n\nrequest_id\030\004 \001(\t\"\213\002\n\024Upda" + + "teClusterRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006" + + "region\030\005 \001(\t\022\024\n\014cluster_name\030\002 \001(\t\0222\n\007cl" + + "uster\030\003 \001(\0132!.google.cloud.dataproc.v1.C" + + "luster\022@\n\035graceful_decommission_timeout\030" + + "\006 \001(\0132\031.google.protobuf.Duration\022/\n\013upda" + + "te_mask\030\004 \001(\0132\032.google.protobuf.FieldMas" + + "k\022\022\n\nrequest_id\030\007 \001(\t\"z\n\024DeleteClusterRe" + + "quest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001(" + + "\t\022\024\n\014cluster_name\030\002 \001(\t\022\024\n\014cluster_uuid\030" + + "\004 \001(\t\022\022\n\nrequest_id\030\005 \001(\t\"M\n\021GetClusterR" + + "equest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001" + + "(\t\022\024\n\014cluster_name\030\002 \001(\t\"p\n\023ListClusters" + + "Request\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\004 " + + "\001(\t\022\016\n\006filter\030\005 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022" + + "\n\npage_token\030\003 \001(\t\"d\n\024ListClustersRespon" + + "se\0223\n\010clusters\030\001 \003(\0132!.google.cloud.data" + + "proc.v1.Cluster\022\027\n\017next_page_token\030\002 \001(\t" + + "\"R\n\026DiagnoseClusterRequest\022\022\n\nproject_id" + + "\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022\024\n\014cluster_name\030\002" + + " \001(\t\",\n\026DiagnoseClusterResults\022\022\n\noutput" + + 
"_uri\030\001 \001(\t2\262\010\n\021ClusterController\022\244\001\n\rCre" + + "ateCluster\022..google.cloud.dataproc.v1.Cr" + + "eateClusterRequest\032\035.google.longrunning." + + "Operation\"D\202\323\344\223\002>\"3/v1/projects/{project" + + "_id}/regions/{region}/clusters:\007cluster\022" + + "\263\001\n\rUpdateCluster\022..google.cloud.datapro" + + "c.v1.UpdateClusterRequest\032\035.google.longr" + + "unning.Operation\"S\202\323\344\223\002M2B/v1/projects/{" + + "project_id}/regions/{region}/clusters/{c" + + "luster_name}:\007cluster\022\252\001\n\rDeleteCluster\022" + + "..google.cloud.dataproc.v1.DeleteCluster" + + "Request\032\035.google.longrunning.Operation\"J" + + "\202\323\344\223\002D*B/v1/projects/{project_id}/region" + + "s/{region}/clusters/{cluster_name}\022\250\001\n\nG" + + "etCluster\022+.google.cloud.dataproc.v1.Get" + + "ClusterRequest\032!.google.cloud.dataproc.v" + + "1.Cluster\"J\202\323\344\223\002D\022B/v1/projects/{project" + + "_id}/regions/{region}/clusters/{cluster_" + + "name}\022\252\001\n\014ListClusters\022-.google.cloud.da" + + "taproc.v1.ListClustersRequest\032..google.c" + + "loud.dataproc.v1.ListClustersResponse\";\202" + + "\323\344\223\0025\0223/v1/projects/{project_id}/regions" + + "/{region}/clusters\022\272\001\n\017DiagnoseCluster\0220" + + ".google.cloud.dataproc.v1.DiagnoseCluste" + "rRequest\032\035.google.longrunning.Operation\"" + - "D\202\323\344\223\002>\"3/v1/projects/{project_id}/regio" + - "ns/{region}/clusters:\007cluster\022\263\001\n\rUpdate" + - "Cluster\022..google.cloud.dataproc.v1.Updat" + - "eClusterRequest\032\035.google.longrunning.Ope" + - "ration\"S\202\323\344\223\002M2B/v1/projects/{project_id" + - "}/regions/{region}/clusters/{cluster_nam" + - "e}:\007cluster\022\252\001\n\rDeleteCluster\022..google.c" + - "loud.dataproc.v1.DeleteClusterRequest\032\035." 
+ - "google.longrunning.Operation\"J\202\323\344\223\002D*B/v" + - "1/projects/{project_id}/regions/{region}" + - "/clusters/{cluster_name}\022\250\001\n\nGetCluster\022" + - "+.google.cloud.dataproc.v1.GetClusterReq" + - "uest\032!.google.cloud.dataproc.v1.Cluster\"" + - "J\202\323\344\223\002D\022B/v1/projects/{project_id}/regio" + - "ns/{region}/clusters/{cluster_name}\022\252\001\n\014" + - "ListClusters\022-.google.cloud.dataproc.v1." + - "ListClustersRequest\032..google.cloud.datap" + - "roc.v1.ListClustersResponse\";\202\323\344\223\0025\0223/v1" + - "/projects/{project_id}/regions/{region}/" + - "clusters\022\272\001\n\017DiagnoseCluster\0220.google.cl" + - "oud.dataproc.v1.DiagnoseClusterRequest\032\035" + - ".google.longrunning.Operation\"V\202\323\344\223\002P\"K/" + - "v1/projects/{project_id}/regions/{region" + - "}/clusters/{cluster_name}:diagnose:\001*Bq\n" + - "\034com.google.cloud.dataproc.v1B\rClustersP" + - "rotoP\001Z@google.golang.org/genproto/googl" + - "eapis/cloud/dataproc/v1;dataprocb\006proto3" + "V\202\323\344\223\002P\"K/v1/projects/{project_id}/regio" + + "ns/{region}/clusters/{cluster_name}:diag" + + "nose:\001*Bq\n\034com.google.cloud.dataproc.v1B" + + "\rClustersProtoP\001Z@google.golang.org/genp" + + "roto/googleapis/cloud/dataproc/v1;datapr" + + "ocb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -281,7 +293,6 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), - com.google.cloud.dataproc.v1.OperationsProto.getDescriptor(), com.google.longrunning.OperationsProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.FieldMaskProto.getDescriptor(), @@ -304,9 +315,15 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1_ClusterConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_ClusterConfig_descriptor, - new java.lang.String[] { "ConfigBucket", "GceClusterConfig", "MasterConfig", "WorkerConfig", "SecondaryWorkerConfig", "SoftwareConfig", "InitializationActions", }); - internal_static_google_cloud_dataproc_v1_GceClusterConfig_descriptor = + new java.lang.String[] { "ConfigBucket", "GceClusterConfig", "MasterConfig", "WorkerConfig", "SecondaryWorkerConfig", "SoftwareConfig", "InitializationActions", "EncryptionConfig", }); + internal_static_google_cloud_dataproc_v1_EncryptionConfig_descriptor = getDescriptor().getMessageTypes().get(2); + internal_static_google_cloud_dataproc_v1_EncryptionConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_dataproc_v1_EncryptionConfig_descriptor, + new java.lang.String[] { "GcePdKmsKeyName", }); + internal_static_google_cloud_dataproc_v1_GceClusterConfig_descriptor = + getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_dataproc_v1_GceClusterConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_GceClusterConfig_descriptor, @@ -318,43 +335,43 @@ public com.google.protobuf.ExtensionRegistry 
assignDescriptors( internal_static_google_cloud_dataproc_v1_GceClusterConfig_MetadataEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1_InstanceGroupConfig_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_dataproc_v1_InstanceGroupConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_InstanceGroupConfig_descriptor, new java.lang.String[] { "NumInstances", "InstanceNames", "ImageUri", "MachineTypeUri", "DiskConfig", "IsPreemptible", "ManagedGroupConfig", "Accelerators", }); internal_static_google_cloud_dataproc_v1_ManagedGroupConfig_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_dataproc_v1_ManagedGroupConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_ManagedGroupConfig_descriptor, new java.lang.String[] { "InstanceTemplateName", "InstanceGroupManagerName", }); internal_static_google_cloud_dataproc_v1_AcceleratorConfig_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_dataproc_v1_AcceleratorConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_AcceleratorConfig_descriptor, new java.lang.String[] { "AcceleratorTypeUri", "AcceleratorCount", }); internal_static_google_cloud_dataproc_v1_DiskConfig_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_dataproc_v1_DiskConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_DiskConfig_descriptor, - new java.lang.String[] { "BootDiskSizeGb", 
"NumLocalSsds", }); + new java.lang.String[] { "BootDiskType", "BootDiskSizeGb", "NumLocalSsds", }); internal_static_google_cloud_dataproc_v1_NodeInitializationAction_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_dataproc_v1_NodeInitializationAction_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_NodeInitializationAction_descriptor, new java.lang.String[] { "ExecutableFile", "ExecutionTimeout", }); internal_static_google_cloud_dataproc_v1_ClusterStatus_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_dataproc_v1_ClusterStatus_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_ClusterStatus_descriptor, new java.lang.String[] { "State", "Detail", "StateStartTime", "Substate", }); internal_static_google_cloud_dataproc_v1_SoftwareConfig_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_dataproc_v1_SoftwareConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_SoftwareConfig_descriptor, @@ -366,7 +383,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1_SoftwareConfig_PropertiesEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1_ClusterMetrics_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_dataproc_v1_ClusterMetrics_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_ClusterMetrics_descriptor, @@ -384,49 +401,49 @@ public 
com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1_ClusterMetrics_YarnMetricsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1_CreateClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_dataproc_v1_CreateClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_CreateClusterRequest_descriptor, - new java.lang.String[] { "ProjectId", "Region", "Cluster", }); + new java.lang.String[] { "ProjectId", "Region", "Cluster", "RequestId", }); internal_static_google_cloud_dataproc_v1_UpdateClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_dataproc_v1_UpdateClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_UpdateClusterRequest_descriptor, - new java.lang.String[] { "ProjectId", "Region", "ClusterName", "Cluster", "UpdateMask", }); + new java.lang.String[] { "ProjectId", "Region", "ClusterName", "Cluster", "GracefulDecommissionTimeout", "UpdateMask", "RequestId", }); internal_static_google_cloud_dataproc_v1_DeleteClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_dataproc_v1_DeleteClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_DeleteClusterRequest_descriptor, - new java.lang.String[] { "ProjectId", "Region", "ClusterName", }); + new java.lang.String[] { "ProjectId", "Region", "ClusterName", "ClusterUuid", "RequestId", }); internal_static_google_cloud_dataproc_v1_GetClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(14); 
+ getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_dataproc_v1_GetClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_GetClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "ClusterName", }); internal_static_google_cloud_dataproc_v1_ListClustersRequest_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_dataproc_v1_ListClustersRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_ListClustersRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "Filter", "PageSize", "PageToken", }); internal_static_google_cloud_dataproc_v1_ListClustersResponse_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_dataproc_v1_ListClustersResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_ListClustersResponse_descriptor, new java.lang.String[] { "Clusters", "NextPageToken", }); internal_static_google_cloud_dataproc_v1_DiagnoseClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(18); internal_static_google_cloud_dataproc_v1_DiagnoseClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_DiagnoseClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "ClusterName", }); internal_static_google_cloud_dataproc_v1_DiagnoseClusterResults_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(19); internal_static_google_cloud_dataproc_v1_DiagnoseClusterResults_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_DiagnoseClusterResults_descriptor, @@ -437,7 +454,6 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor .internalUpdateFileDescriptor(descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); - com.google.cloud.dataproc.v1.OperationsProto.getDescriptor(); com.google.longrunning.OperationsProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.FieldMaskProto.getDescriptor(); diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequest.java index 24628270daa2..f5fb17c005ab 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequest.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequest.java @@ -22,6 +22,7 @@ private CreateClusterRequest(com.google.protobuf.GeneratedMessageV3.Builder b private CreateClusterRequest() { projectId_ = ""; region_ = ""; + requestId_ = ""; } @java.lang.Override @@ -73,6 +74,12 @@ private CreateClusterRequest( region_ = s; break; } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -224,6 +231,64 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClusterOrBuilder() { return getCluster(); } + public static final int REQUEST_ID_FIELD_NUMBER = 4; + private volatile java.lang.Object requestId_; + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } + } + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -247,6 +312,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getRegionBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, region_); } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_); + } unknownFields.writeTo(output); } @@ -266,6 +334,9 @@ public int getSerializedSize() { if (!getRegionBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, region_); } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -291,6 +362,8 @@ public boolean equals(final java.lang.Object obj) { result = result && getCluster() .equals(other.getCluster()); } + result = result && getRequestId() + .equals(other.getRequestId()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -310,6 +383,8 @@ public int hashCode() { hash = (37 * hash) + CLUSTER_FIELD_NUMBER; hash = (53 * hash) + getCluster().hashCode(); } + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -457,6 +532,8 @@ public Builder clear() { cluster_ = null; clusterBuilder_ = null; } + requestId_ = ""; + return this; } @@ -490,6 +567,7 @@ public 
com.google.cloud.dataproc.v1.CreateClusterRequest buildPartial() { } else { result.cluster_ = clusterBuilder_.build(); } + result.requestId_ = requestId_; onBuilt(); return result; } @@ -549,6 +627,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1.CreateClusterRequest other if (other.hasCluster()) { mergeCluster(other.getCluster()); } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -913,6 +995,135 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClusterOrBuilder() { } return clusterBuilder_; } + + private java.lang.Object requestId_ = ""; + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public Builder setRequestId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + requestId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public Builder clearRequestId() { + + requestId_ = getDefaultInstance().getRequestId(); + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public Builder setRequestIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + requestId_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequestOrBuilder.java index 3609d5f5da36..5e8b1ccfc181 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequestOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateClusterRequestOrBuilder.java @@ -69,4 +69,38 @@ public interface CreateClusterRequestOrBuilder extends * .google.cloud.dataproc.v1.Cluster cluster = 2; */ com.google.cloud.dataproc.v1.ClusterOrBuilder getClusterOrBuilder(); + + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + java.lang.String getRequestId(); + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests  with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + com.google.protobuf.ByteString + getRequestIdBytes(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequest.java new file mode 100644 index 000000000000..64310a2fc680 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequest.java @@ -0,0 +1,820 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A request to create a workflow template.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.CreateWorkflowTemplateRequest} + */ +public final class CreateWorkflowTemplateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) + CreateWorkflowTemplateRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use CreateWorkflowTemplateRequest.newBuilder() to construct. + private CreateWorkflowTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CreateWorkflowTemplateRequest() { + parent_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CreateWorkflowTemplateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + parent_ = s; + break; + } + case 18: { + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder subBuilder = null; + if (template_ != null) { + subBuilder = template_.toBuilder(); + } + template_ = input.readMessage(com.google.cloud.dataproc.v1.WorkflowTemplate.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(template_); + template_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + private volatile java.lang.Object parent_; + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + public com.google.protobuf.ByteString + getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TEMPLATE_FIELD_NUMBER = 2; + private com.google.cloud.dataproc.v1.WorkflowTemplate template_; + /** + *
+   * Required. The Dataproc workflow template to create.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public boolean hasTemplate() { + return template_ != null; + } + /** + *
+   * Required. The Dataproc workflow template to create.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate() { + return template_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } + /** + *
+   * Required. The Dataproc workflow template to create.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder() { + return getTemplate(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getParentBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (template_ != null) { + output.writeMessage(2, getTemplate()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getParentBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (template_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getTemplate()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) obj; + + boolean result = true; + result = result && getParent() + .equals(other.getParent()); + result = result && (hasTemplate() == other.hasTemplate()); + if (hasTemplate()) { + result = result && getTemplate() + .equals(other.getTemplate()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + 
@java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasTemplate()) { + hash = (37 * hash) + TEMPLATE_FIELD_NUMBER; + hash = (53 * hash) + getTemplate().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); 
+ } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to create a workflow template.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.CreateWorkflowTemplateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + parent_ = ""; + + if (templateBuilder_ == null) { + template_ = null; + } else { + template_ = null; + templateBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + 
public com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest build() { + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest buildPartial() { + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest(this); + result.parent_ = parent_; + if (templateBuilder_ == null) { + result.template_ = template_; + } else { + result.template_ = templateBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) { + return mergeFrom((com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest other) { + if (other == com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest.getDefaultInstance()) return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + onChanged(); + } + if (other.hasTemplate()) { + mergeTemplate(other.getTemplate()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object parent_ = ""; + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public com.google.protobuf.ByteString + getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder setParent( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + parent_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder clearParent() { + + parent_ = getDefaultInstance().getParent(); + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder setParentBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + parent_ = value; + onChanged(); + return this; + } + + private com.google.cloud.dataproc.v1.WorkflowTemplate template_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> templateBuilder_; + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public boolean hasTemplate() { + return templateBuilder_ != null || template_ != null; + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate() { + if (templateBuilder_ == null) { + return template_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } else { + return templateBuilder_.getMessage(); + } + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder setTemplate(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + template_ = value; + onChanged(); + } else { + templateBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder setTemplate( + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder builderForValue) { + if (templateBuilder_ == null) { + template_ = builderForValue.build(); + onChanged(); + } else { + templateBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder mergeTemplate(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templateBuilder_ == null) { + if (template_ != null) { + template_ = + com.google.cloud.dataproc.v1.WorkflowTemplate.newBuilder(template_).mergeFrom(value).buildPartial(); + } else { + template_ = value; + } + onChanged(); + } else { + templateBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder clearTemplate() { + if (templateBuilder_ == null) { + template_ = null; + onChanged(); + } else { + template_ = null; + templateBuilder_ = null; + } + + return this; + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate.Builder getTemplateBuilder() { + + onChanged(); + return getTemplateFieldBuilder().getBuilder(); + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder() { + if (templateBuilder_ != null) { + return templateBuilder_.getMessageOrBuilder(); + } else { + return template_ == null ? + com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } + } + /** + *
+     * Required. The Dataproc workflow template to create.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> + getTemplateFieldBuilder() { + if (templateBuilder_ == null) { + templateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder>( + getTemplate(), + getParentForChildren(), + isClean()); + template_ = null; + } + return templateBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) + private static final com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest(); + } + + public static com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CreateWorkflowTemplateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateWorkflowTemplateRequest(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequestOrBuilder.java new file mode 100644 index 000000000000..abe432c3931f --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/CreateWorkflowTemplateRequestOrBuilder.java @@ -0,0 +1,56 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface CreateWorkflowTemplateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.CreateWorkflowTemplateRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + java.lang.String getParent(); + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + com.google.protobuf.ByteString + getParentBytes(); + + /** + *
+   * Required. The Dataproc workflow template to create.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + boolean hasTemplate(); + /** + *
+   * Required. The Dataproc workflow template to create.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate(); + /** + *
+   * Required. The Dataproc workflow template to create.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequest.java index 9d4fdf59cd00..ac6114290712 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequest.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequest.java @@ -23,6 +23,8 @@ private DeleteClusterRequest() { projectId_ = ""; region_ = ""; clusterName_ = ""; + clusterUuid_ = ""; + requestId_ = ""; } @java.lang.Override @@ -67,6 +69,18 @@ private DeleteClusterRequest( region_ = s; break; } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + clusterUuid_ = s; + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -227,6 +241,108 @@ public java.lang.String getClusterName() { } } + public static final int CLUSTER_UUID_FIELD_NUMBER = 4; + private volatile java.lang.Object clusterUuid_; + /** + *
+   * Optional. Specifying the `cluster_uuid` means the RPC should fail
+   * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+   * 
+ * + * string cluster_uuid = 4; + */ + public java.lang.String getClusterUuid() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterUuid_ = s; + return s; + } + } + /** + *
+   * Optional. Specifying the `cluster_uuid` means the RPC should fail
+   * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+   * 
+ * + * string cluster_uuid = 4; + */ + public com.google.protobuf.ByteString + getClusterUuidBytes() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int REQUEST_ID_FIELD_NUMBER = 5; + private volatile java.lang.Object requestId_; + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } + } + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -250,6 +366,12 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getRegionBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, region_); } + if (!getClusterUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, clusterUuid_); + } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, requestId_); + } unknownFields.writeTo(output); } @@ -268,6 +390,12 @@ public int getSerializedSize() { if (!getRegionBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, region_); } + if (!getClusterUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, clusterUuid_); + } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, requestId_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -290,6 +418,10 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getRegion()); result = result && getClusterName() .equals(other.getClusterName()); + result = result && getClusterUuid() + .equals(other.getClusterUuid()); + result = result && getRequestId() + .equals(other.getRequestId()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -307,6 +439,10 @@ public int hashCode() { hash = (53 * hash) + getRegion().hashCode(); hash = (37 * hash) + CLUSTER_NAME_FIELD_NUMBER; hash 
= (53 * hash) + getClusterName().hashCode(); + hash = (37 * hash) + CLUSTER_UUID_FIELD_NUMBER; + hash = (53 * hash) + getClusterUuid().hashCode(); + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -450,6 +586,10 @@ public Builder clear() { clusterName_ = ""; + clusterUuid_ = ""; + + requestId_ = ""; + return this; } @@ -479,6 +619,8 @@ public com.google.cloud.dataproc.v1.DeleteClusterRequest buildPartial() { result.projectId_ = projectId_; result.region_ = region_; result.clusterName_ = clusterName_; + result.clusterUuid_ = clusterUuid_; + result.requestId_ = requestId_; onBuilt(); return result; } @@ -539,6 +681,14 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1.DeleteClusterRequest other clusterName_ = other.clusterName_; onChanged(); } + if (!other.getClusterUuid().isEmpty()) { + clusterUuid_ = other.clusterUuid_; + onChanged(); + } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -839,6 +989,229 @@ public Builder setClusterNameBytes( onChanged(); return this; } + + private java.lang.Object clusterUuid_ = ""; + /** + *
+     * Optional. Specifying the `cluster_uuid` means the RPC should fail
+     * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+     * 
+ * + * string cluster_uuid = 4; + */ + public java.lang.String getClusterUuid() { + java.lang.Object ref = clusterUuid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterUuid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. Specifying the `cluster_uuid` means the RPC should fail
+     * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+     * 
+ * + * string cluster_uuid = 4; + */ + public com.google.protobuf.ByteString + getClusterUuidBytes() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. Specifying the `cluster_uuid` means the RPC should fail
+     * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+     * 
+ * + * string cluster_uuid = 4; + */ + public Builder setClusterUuid( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + clusterUuid_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. Specifying the `cluster_uuid` means the RPC should fail
+     * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+     * 
+ * + * string cluster_uuid = 4; + */ + public Builder clearClusterUuid() { + + clusterUuid_ = getDefaultInstance().getClusterUuid(); + onChanged(); + return this; + } + /** + *
+     * Optional. Specifying the `cluster_uuid` means the RPC should fail
+     * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+     * 
+ * + * string cluster_uuid = 4; + */ + public Builder setClusterUuidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + clusterUuid_ = value; + onChanged(); + return this; + } + + private java.lang.Object requestId_ = ""; + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public Builder setRequestId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + requestId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public Builder clearRequestId() { + + requestId_ = getDefaultInstance().getRequestId(); + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public Builder setRequestIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + requestId_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequestOrBuilder.java index 85d9f3930952..a5a1de613a7b 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequestOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteClusterRequestOrBuilder.java @@ -62,4 +62,58 @@ public interface DeleteClusterRequestOrBuilder extends */ com.google.protobuf.ByteString getClusterNameBytes(); + + /** + *
+   * Optional. Specifying the `cluster_uuid` means the RPC should fail
+   * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+   * 
+ * + * string cluster_uuid = 4; + */ + java.lang.String getClusterUuid(); + /** + *
+   * Optional. Specifying the `cluster_uuid` means the RPC should fail
+   * (with error NOT_FOUND) if cluster with specified UUID does not exist.
+   * 
+ * + * string cluster_uuid = 4; + */ + com.google.protobuf.ByteString + getClusterUuidBytes(); + + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + java.lang.String getRequestId(); + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + com.google.protobuf.ByteString + getRequestIdBytes(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequest.java new file mode 100644 index 000000000000..c82fd4f4cb47 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequest.java @@ -0,0 +1,675 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A request to delete a workflow template.
+ * Currently started workflows will remain running.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest} + */ +public final class DeleteWorkflowTemplateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) + DeleteWorkflowTemplateRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use DeleteWorkflowTemplateRequest.newBuilder() to construct. + private DeleteWorkflowTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DeleteWorkflowTemplateRequest() { + name_ = ""; + version_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeleteWorkflowTemplateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + version_ = input.readInt32(); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } 
+ public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VERSION_FIELD_NUMBER = 2; + private int version_; + /** + *
+   * Optional. The version of workflow template to delete. If specified,
+   * will only delete the template if the current server version matches
+   * specified version.
+   * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (version_ != 0) { + output.writeInt32(2, version_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, version_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && (getVersion() + == other.getVersion()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + 
getName().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to delete a workflow template.
+   * Currently started workflows will remain running.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + version_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest getDefaultInstanceForType() { + 
return com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest build() { + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest buildPartial() { + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest(this); + result.name_ = name_; + result.version_ = version_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) { + return mergeFrom((com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest)other); 
+ } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest other) { + if (other == com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int version_ ; + /** + *
+     * Optional. The version of workflow template to delete. If specified,
+     * will only delete the template if the current server version matches
+     * specified version.
+     * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + /** + *
+     * Optional. The version of workflow template to delete. If specified,
+     * will only delete the template if the current server version matches
+     * specified version.
+     * 
+ * + * int32 version = 2; + */ + public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The version of workflow template to delete. If specified,
+     * will only delete the template if the current server version matches
+     * specified version.
+     * 
+ * + * int32 version = 2; + */ + public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) + private static final com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest(); + } + + public static com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DeleteWorkflowTemplateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteWorkflowTemplateRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequestOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequestOrBuilder.java new file mode 100644 index 000000000000..7f2e296fa07f --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DeleteWorkflowTemplateRequestOrBuilder.java @@ -0,0 +1,42 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface DeleteWorkflowTemplateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Optional. The version of workflow template to delete. If specified,
+   * will only delete the template if the current server version matches
+   * specified version.
+   * 
+ * + * int32 version = 2; + */ + int getVersion(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResults.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResults.java index 2eceb9a994ec..466f8d7a7d0a 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResults.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResults.java @@ -89,7 +89,7 @@ private DiagnoseClusterResults( private volatile java.lang.Object outputUri_; /** *
-   * Output-only. The Google Cloud Storage URI of the diagnostic output.
+   * Output only. The Cloud Storage URI of the diagnostic output.
    * The output report is a plain text file with a summary of collected
    * diagnostics.
    * 
@@ -110,7 +110,7 @@ public java.lang.String getOutputUri() { } /** *
-   * Output-only. The Google Cloud Storage URI of the diagnostic output.
+   * Output only. The Cloud Storage URI of the diagnostic output.
    * The output report is a plain text file with a summary of collected
    * diagnostics.
    * 
@@ -441,7 +441,7 @@ public Builder mergeFrom( private java.lang.Object outputUri_ = ""; /** *
-     * Output-only. The Google Cloud Storage URI of the diagnostic output.
+     * Output only. The Cloud Storage URI of the diagnostic output.
      * The output report is a plain text file with a summary of collected
      * diagnostics.
      * 
@@ -462,7 +462,7 @@ public java.lang.String getOutputUri() { } /** *
-     * Output-only. The Google Cloud Storage URI of the diagnostic output.
+     * Output only. The Cloud Storage URI of the diagnostic output.
      * The output report is a plain text file with a summary of collected
      * diagnostics.
      * 
@@ -484,7 +484,7 @@ public java.lang.String getOutputUri() { } /** *
-     * Output-only. The Google Cloud Storage URI of the diagnostic output.
+     * Output only. The Cloud Storage URI of the diagnostic output.
      * The output report is a plain text file with a summary of collected
      * diagnostics.
      * 
@@ -503,7 +503,7 @@ public Builder setOutputUri( } /** *
-     * Output-only. The Google Cloud Storage URI of the diagnostic output.
+     * Output only. The Cloud Storage URI of the diagnostic output.
      * The output report is a plain text file with a summary of collected
      * diagnostics.
      * 
@@ -518,7 +518,7 @@ public Builder clearOutputUri() { } /** *
-     * Output-only. The Google Cloud Storage URI of the diagnostic output.
+     * Output only. The Cloud Storage URI of the diagnostic output.
      * The output report is a plain text file with a summary of collected
      * diagnostics.
      * 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResultsOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResultsOrBuilder.java index 570bc765e04b..c4cace6888b4 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResultsOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiagnoseClusterResultsOrBuilder.java @@ -9,7 +9,7 @@ public interface DiagnoseClusterResultsOrBuilder extends /** *
-   * Output-only. The Google Cloud Storage URI of the diagnostic output.
+   * Output only. The Cloud Storage URI of the diagnostic output.
    * The output report is a plain text file with a summary of collected
    * diagnostics.
    * 
@@ -19,7 +19,7 @@ public interface DiagnoseClusterResultsOrBuilder extends java.lang.String getOutputUri(); /** *
-   * Output-only. The Google Cloud Storage URI of the diagnostic output.
+   * Output only. The Cloud Storage URI of the diagnostic output.
    * The output report is a plain text file with a summary of collected
    * diagnostics.
    * 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfig.java index 145f82ecf8a6..875da230ea38 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfig.java @@ -20,6 +20,7 @@ private DiskConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private DiskConfig() { + bootDiskType_ = ""; bootDiskSizeGb_ = 0; numLocalSsds_ = 0; } @@ -58,6 +59,12 @@ private DiskConfig( numLocalSsds_ = input.readInt32(); break; } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + bootDiskType_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -90,6 +97,52 @@ private DiskConfig( com.google.cloud.dataproc.v1.DiskConfig.class, com.google.cloud.dataproc.v1.DiskConfig.Builder.class); } + public static final int BOOT_DISK_TYPE_FIELD_NUMBER = 3; + private volatile java.lang.Object bootDiskType_; + /** + *
+   * Optional. Type of the boot disk (default is "pd-standard").
+   * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+   * "pd-standard" (Persistent Disk Hard Disk Drive).
+   * 
+ * + * string boot_disk_type = 3; + */ + public java.lang.String getBootDiskType() { + java.lang.Object ref = bootDiskType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + bootDiskType_ = s; + return s; + } + } + /** + *
+   * Optional. Type of the boot disk (default is "pd-standard").
+   * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+   * "pd-standard" (Persistent Disk Hard Disk Drive).
+   * 
+ * + * string boot_disk_type = 3; + */ + public com.google.protobuf.ByteString + getBootDiskTypeBytes() { + java.lang.Object ref = bootDiskType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bootDiskType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + public static final int BOOT_DISK_SIZE_GB_FIELD_NUMBER = 1; private int bootDiskSizeGb_; /** @@ -141,6 +194,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (numLocalSsds_ != 0) { output.writeInt32(2, numLocalSsds_); } + if (!getBootDiskTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, bootDiskType_); + } unknownFields.writeTo(output); } @@ -158,6 +214,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, numLocalSsds_); } + if (!getBootDiskTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, bootDiskType_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -174,6 +233,8 @@ public boolean equals(final java.lang.Object obj) { com.google.cloud.dataproc.v1.DiskConfig other = (com.google.cloud.dataproc.v1.DiskConfig) obj; boolean result = true; + result = result && getBootDiskType() + .equals(other.getBootDiskType()); result = result && (getBootDiskSizeGb() == other.getBootDiskSizeGb()); result = result && (getNumLocalSsds() @@ -189,6 +250,8 @@ public int hashCode() { } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + BOOT_DISK_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getBootDiskType().hashCode(); hash = (37 * hash) + BOOT_DISK_SIZE_GB_FIELD_NUMBER; hash = (53 * hash) + getBootDiskSizeGb(); hash = (37 * hash) + NUM_LOCAL_SSDS_FIELD_NUMBER; @@ -330,6 +393,8 @@ private void maybeForceBuilderInitialization() { @java.lang.Override public Builder clear() { 
super.clear(); + bootDiskType_ = ""; + bootDiskSizeGb_ = 0; numLocalSsds_ = 0; @@ -360,6 +425,7 @@ public com.google.cloud.dataproc.v1.DiskConfig build() { @java.lang.Override public com.google.cloud.dataproc.v1.DiskConfig buildPartial() { com.google.cloud.dataproc.v1.DiskConfig result = new com.google.cloud.dataproc.v1.DiskConfig(this); + result.bootDiskType_ = bootDiskType_; result.bootDiskSizeGb_ = bootDiskSizeGb_; result.numLocalSsds_ = numLocalSsds_; onBuilt(); @@ -410,6 +476,10 @@ public Builder mergeFrom(com.google.protobuf.Message other) { public Builder mergeFrom(com.google.cloud.dataproc.v1.DiskConfig other) { if (other == com.google.cloud.dataproc.v1.DiskConfig.getDefaultInstance()) return this; + if (!other.getBootDiskType().isEmpty()) { + bootDiskType_ = other.bootDiskType_; + onChanged(); + } if (other.getBootDiskSizeGb() != 0) { setBootDiskSizeGb(other.getBootDiskSizeGb()); } @@ -445,6 +515,105 @@ public Builder mergeFrom( return this; } + private java.lang.Object bootDiskType_ = ""; + /** + *
+     * Optional. Type of the boot disk (default is "pd-standard").
+     * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+     * "pd-standard" (Persistent Disk Hard Disk Drive).
+     * 
+ * + * string boot_disk_type = 3; + */ + public java.lang.String getBootDiskType() { + java.lang.Object ref = bootDiskType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + bootDiskType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. Type of the boot disk (default is "pd-standard").
+     * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+     * "pd-standard" (Persistent Disk Hard Disk Drive).
+     * 
+ * + * string boot_disk_type = 3; + */ + public com.google.protobuf.ByteString + getBootDiskTypeBytes() { + java.lang.Object ref = bootDiskType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bootDiskType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. Type of the boot disk (default is "pd-standard").
+     * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+     * "pd-standard" (Persistent Disk Hard Disk Drive).
+     * 
+ * + * string boot_disk_type = 3; + */ + public Builder setBootDiskType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + bootDiskType_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. Type of the boot disk (default is "pd-standard").
+     * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+     * "pd-standard" (Persistent Disk Hard Disk Drive).
+     * 
+ * + * string boot_disk_type = 3; + */ + public Builder clearBootDiskType() { + + bootDiskType_ = getDefaultInstance().getBootDiskType(); + onChanged(); + return this; + } + /** + *
+     * Optional. Type of the boot disk (default is "pd-standard").
+     * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+     * "pd-standard" (Persistent Disk Hard Disk Drive).
+     * 
+ * + * string boot_disk_type = 3; + */ + public Builder setBootDiskTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + bootDiskType_ = value; + onChanged(); + return this; + } + private int bootDiskSizeGb_ ; /** *
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfigOrBuilder.java
index 7eb9328827ef..57236c4d7043 100644
--- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfigOrBuilder.java
+++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/DiskConfigOrBuilder.java
@@ -7,6 +7,28 @@ public interface DiskConfigOrBuilder extends
     // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.DiskConfig)
     com.google.protobuf.MessageOrBuilder {
 
+  /**
+   * 
+   * Optional. Type of the boot disk (default is "pd-standard").
+   * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+   * "pd-standard" (Persistent Disk Hard Disk Drive).
+   * 
+ * + * string boot_disk_type = 3; + */ + java.lang.String getBootDiskType(); + /** + *
+   * Optional. Type of the boot disk (default is "pd-standard").
+   * Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
+   * "pd-standard" (Persistent Disk Hard Disk Drive).
+   * 
+ * + * string boot_disk_type = 3; + */ + com.google.protobuf.ByteString + getBootDiskTypeBytes(); + /** *
    * Optional. Size in GB of the boot disk (default is 500GB).
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfig.java
new file mode 100644
index 000000000000..9901de871f68
--- /dev/null
+++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfig.java
@@ -0,0 +1,584 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/cloud/dataproc/v1/clusters.proto
+
+package com.google.cloud.dataproc.v1;
+
+/**
+ * 
+ * Encryption settings for the cluster.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.EncryptionConfig} + */ +public final class EncryptionConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.EncryptionConfig) + EncryptionConfigOrBuilder { +private static final long serialVersionUID = 0L; + // Use EncryptionConfig.newBuilder() to construct. + private EncryptionConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private EncryptionConfig() { + gcePdKmsKeyName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EncryptionConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + gcePdKmsKeyName_ = s; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
com.google.cloud.dataproc.v1.ClustersProto.internal_static_google_cloud_dataproc_v1_EncryptionConfig_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.ClustersProto.internal_static_google_cloud_dataproc_v1_EncryptionConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.EncryptionConfig.class, com.google.cloud.dataproc.v1.EncryptionConfig.Builder.class); + } + + public static final int GCE_PD_KMS_KEY_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object gcePdKmsKeyName_; + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public java.lang.String getGcePdKmsKeyName() { + java.lang.Object ref = gcePdKmsKeyName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + gcePdKmsKeyName_ = s; + return s; + } + } + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public com.google.protobuf.ByteString + getGcePdKmsKeyNameBytes() { + java.lang.Object ref = gcePdKmsKeyName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + gcePdKmsKeyName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getGcePdKmsKeyNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, gcePdKmsKeyName_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getGcePdKmsKeyNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, gcePdKmsKeyName_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.EncryptionConfig)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.EncryptionConfig other = (com.google.cloud.dataproc.v1.EncryptionConfig) obj; + + boolean result = true; + result = result && getGcePdKmsKeyName() + .equals(other.getGcePdKmsKeyName()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; 
+ hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + GCE_PD_KMS_KEY_NAME_FIELD_NUMBER; + hash = (53 * hash) + getGcePdKmsKeyName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.EncryptionConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.EncryptionConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Encryption settings for the cluster.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.EncryptionConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.EncryptionConfig) + com.google.cloud.dataproc.v1.EncryptionConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.ClustersProto.internal_static_google_cloud_dataproc_v1_EncryptionConfig_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.ClustersProto.internal_static_google_cloud_dataproc_v1_EncryptionConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.EncryptionConfig.class, com.google.cloud.dataproc.v1.EncryptionConfig.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.EncryptionConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + gcePdKmsKeyName_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.ClustersProto.internal_static_google_cloud_dataproc_v1_EncryptionConfig_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.EncryptionConfig getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.EncryptionConfig.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.EncryptionConfig build() 
{ + com.google.cloud.dataproc.v1.EncryptionConfig result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.EncryptionConfig buildPartial() { + com.google.cloud.dataproc.v1.EncryptionConfig result = new com.google.cloud.dataproc.v1.EncryptionConfig(this); + result.gcePdKmsKeyName_ = gcePdKmsKeyName_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.EncryptionConfig) { + return mergeFrom((com.google.cloud.dataproc.v1.EncryptionConfig)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.EncryptionConfig other) { + if (other == com.google.cloud.dataproc.v1.EncryptionConfig.getDefaultInstance()) return this; + if 
(!other.getGcePdKmsKeyName().isEmpty()) { + gcePdKmsKeyName_ = other.gcePdKmsKeyName_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.EncryptionConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.EncryptionConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object gcePdKmsKeyName_ = ""; + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public java.lang.String getGcePdKmsKeyName() { + java.lang.Object ref = gcePdKmsKeyName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + gcePdKmsKeyName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public com.google.protobuf.ByteString + getGcePdKmsKeyNameBytes() { + java.lang.Object ref = gcePdKmsKeyName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + gcePdKmsKeyName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public Builder setGcePdKmsKeyName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + gcePdKmsKeyName_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public Builder clearGcePdKmsKeyName() { + + gcePdKmsKeyName_ = getDefaultInstance().getGcePdKmsKeyName(); + onChanged(); + return this; + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public Builder setGcePdKmsKeyNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + gcePdKmsKeyName_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.EncryptionConfig) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.EncryptionConfig) + private static final com.google.cloud.dataproc.v1.EncryptionConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.EncryptionConfig(); + } + + public static com.google.cloud.dataproc.v1.EncryptionConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public EncryptionConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EncryptionConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.EncryptionConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfigOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfigOrBuilder.java new file mode 100644 index 000000000000..9c27025801d2 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/EncryptionConfigOrBuilder.java @@ -0,0 +1,29 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/clusters.proto + +package com.google.cloud.dataproc.v1; + +public interface EncryptionConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.EncryptionConfig) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + java.lang.String getGcePdKmsKeyName(); + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + com.google.protobuf.ByteString + getGcePdKmsKeyNameBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfig.java index 410fcbe334c0..2b746ea14fcd 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfig.java @@ -5,7 +5,7 @@ /** *
- * Common config settings for resources of Google Compute Engine cluster
+ * Common config settings for resources of Compute Engine cluster
  * instances, applicable to all instances in the cluster.
  * 
* @@ -169,7 +169,7 @@ protected com.google.protobuf.MapField internalGetMapField( private volatile java.lang.Object zoneUri_; /** *
-   * Optional. The zone where the Google Compute Engine cluster will be located.
+   * Optional. The zone where the Compute Engine cluster will be located.
    * On a create request, it is required in the "global" region. If omitted
    * in a non-global Cloud Dataproc region, the service will pick a zone in the
    * corresponding Compute Engine region. On a get request, zone will
@@ -196,7 +196,7 @@ public java.lang.String getZoneUri() {
   }
   /**
    * 
-   * Optional. The zone where the Google Compute Engine cluster will be located.
+   * Optional. The zone where the Compute Engine cluster will be located.
    * On a create request, it is required in the "global" region. If omitted
    * in a non-global Cloud Dataproc region, the service will pick a zone in the
    * corresponding Compute Engine region. On a get request, zone will
@@ -227,7 +227,7 @@ public java.lang.String getZoneUri() {
   private volatile java.lang.Object networkUri_;
   /**
    * 
-   * Optional. The Google Compute Engine network to be used for machine
+   * Optional. The Compute Engine network to be used for machine
    * communications. Cannot be specified with subnetwork_uri. If neither
    * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
    * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -254,7 +254,7 @@ public java.lang.String getNetworkUri() {
   }
   /**
    * 
-   * Optional. The Google Compute Engine network to be used for machine
+   * Optional. The Compute Engine network to be used for machine
    * communications. Cannot be specified with subnetwork_uri. If neither
    * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
    * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -285,7 +285,7 @@ public java.lang.String getNetworkUri() {
   private volatile java.lang.Object subnetworkUri_;
   /**
    * 
-   * Optional. The Google Compute Engine subnetwork to be used for machine
+   * Optional. The Compute Engine subnetwork to be used for machine
    * communications. Cannot be specified with network_uri.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -309,7 +309,7 @@ public java.lang.String getSubnetworkUri() {
   }
   /**
    * 
-   * Optional. The Google Compute Engine subnetwork to be used for machine
+   * Optional. The Compute Engine subnetwork to be used for machine
    * communications. Cannot be specified with network_uri.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -356,8 +356,8 @@ public boolean getInternalIpOnly() {
   /**
    * 
    * Optional. The service account of the instances. Defaults to the default
-   * Google Compute Engine service account. Custom service accounts need
-   * permissions equivalent to the folloing IAM roles:
+   * Compute Engine service account. Custom service accounts need
+   * permissions equivalent to the following IAM roles:
    * * roles/logging.logWriter
    * * roles/storage.objectAdmin
    * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -382,8 +382,8 @@ public java.lang.String getServiceAccount() {
   /**
    * 
    * Optional. The service account of the instances. Defaults to the default
-   * Google Compute Engine service account. Custom service accounts need
-   * permissions equivalent to the folloing IAM roles:
+   * Compute Engine service account. Custom service accounts need
+   * permissions equivalent to the following IAM roles:
    * * roles/logging.logWriter
    * * roles/storage.objectAdmin
    * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -411,7 +411,7 @@ public java.lang.String getServiceAccount() {
   private com.google.protobuf.LazyStringList serviceAccountScopes_;
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -432,7 +432,7 @@ public java.lang.String getServiceAccount() {
   }
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -452,7 +452,7 @@ public int getServiceAccountScopesCount() {
   }
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -472,7 +472,7 @@ public java.lang.String getServiceAccountScopes(int index) {
   }
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -496,7 +496,7 @@ public java.lang.String getServiceAccountScopes(int index) {
   private com.google.protobuf.LazyStringList tags_;
   /**
    * 
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -508,7 +508,7 @@ public java.lang.String getServiceAccountScopes(int index) { } /** *
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -519,7 +519,7 @@ public int getTagsCount() { } /** *
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -530,7 +530,7 @@ public java.lang.String getTags(int index) { } /** *
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -569,7 +569,7 @@ public int getMetadataCount() { } /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -590,7 +590,7 @@ public java.util.Map getMetadata() { } /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -602,7 +602,7 @@ public java.util.Map getMetadataMap() { } /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -619,7 +619,7 @@ public java.lang.String getMetadataOrDefault( } /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -892,7 +892,7 @@ protected Builder newBuilderForType( } /** *
-   * Common config settings for resources of Google Compute Engine cluster
+   * Common config settings for resources of Compute Engine cluster
    * instances, applicable to all instances in the cluster.
    * 
* @@ -1138,7 +1138,7 @@ public Builder mergeFrom( private java.lang.Object zoneUri_ = ""; /** *
-     * Optional. The zone where the Google Compute Engine cluster will be located.
+     * Optional. The zone where the Compute Engine cluster will be located.
      * On a create request, it is required in the "global" region. If omitted
      * in a non-global Cloud Dataproc region, the service will pick a zone in the
      * corresponding Compute Engine region. On a get request, zone will
@@ -1165,7 +1165,7 @@ public java.lang.String getZoneUri() {
     }
     /**
      * 
-     * Optional. The zone where the Google Compute Engine cluster will be located.
+     * Optional. The zone where the Compute Engine cluster will be located.
      * On a create request, it is required in the "global" region. If omitted
      * in a non-global Cloud Dataproc region, the service will pick a zone in the
      * corresponding Compute Engine region. On a get request, zone will
@@ -1193,7 +1193,7 @@ public java.lang.String getZoneUri() {
     }
     /**
      * 
-     * Optional. The zone where the Google Compute Engine cluster will be located.
+     * Optional. The zone where the Compute Engine cluster will be located.
      * On a create request, it is required in the "global" region. If omitted
      * in a non-global Cloud Dataproc region, the service will pick a zone in the
      * corresponding Compute Engine region. On a get request, zone will
@@ -1218,7 +1218,7 @@ public Builder setZoneUri(
     }
     /**
      * 
-     * Optional. The zone where the Google Compute Engine cluster will be located.
+     * Optional. The zone where the Compute Engine cluster will be located.
      * On a create request, it is required in the "global" region. If omitted
      * in a non-global Cloud Dataproc region, the service will pick a zone in the
      * corresponding Compute Engine region. On a get request, zone will
@@ -1239,7 +1239,7 @@ public Builder clearZoneUri() {
     }
     /**
      * 
-     * Optional. The zone where the Google Compute Engine cluster will be located.
+     * Optional. The zone where the Compute Engine cluster will be located.
      * On a create request, it is required in the "global" region. If omitted
      * in a non-global Cloud Dataproc region, the service will pick a zone in the
      * corresponding Compute Engine region. On a get request, zone will
@@ -1267,7 +1267,7 @@ public Builder setZoneUriBytes(
     private java.lang.Object networkUri_ = "";
     /**
      * 
-     * Optional. The Google Compute Engine network to be used for machine
+     * Optional. The Compute Engine network to be used for machine
      * communications. Cannot be specified with subnetwork_uri. If neither
      * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
      * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -1294,7 +1294,7 @@ public java.lang.String getNetworkUri() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine network to be used for machine
+     * Optional. The Compute Engine network to be used for machine
      * communications. Cannot be specified with subnetwork_uri. If neither
      * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
      * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -1322,7 +1322,7 @@ public java.lang.String getNetworkUri() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine network to be used for machine
+     * Optional. The Compute Engine network to be used for machine
      * communications. Cannot be specified with subnetwork_uri. If neither
      * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
      * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -1347,7 +1347,7 @@ public Builder setNetworkUri(
     }
     /**
      * 
-     * Optional. The Google Compute Engine network to be used for machine
+     * Optional. The Compute Engine network to be used for machine
      * communications. Cannot be specified with subnetwork_uri. If neither
      * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
      * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -1368,7 +1368,7 @@ public Builder clearNetworkUri() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine network to be used for machine
+     * Optional. The Compute Engine network to be used for machine
      * communications. Cannot be specified with subnetwork_uri. If neither
      * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
      * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -1396,7 +1396,7 @@ public Builder setNetworkUriBytes(
     private java.lang.Object subnetworkUri_ = "";
     /**
      * 
-     * Optional. The Google Compute Engine subnetwork to be used for machine
+     * Optional. The Compute Engine subnetwork to be used for machine
      * communications. Cannot be specified with network_uri.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -1420,7 +1420,7 @@ public java.lang.String getSubnetworkUri() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine subnetwork to be used for machine
+     * Optional. The Compute Engine subnetwork to be used for machine
      * communications. Cannot be specified with network_uri.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -1445,7 +1445,7 @@ public java.lang.String getSubnetworkUri() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine subnetwork to be used for machine
+     * Optional. The Compute Engine subnetwork to be used for machine
      * communications. Cannot be specified with network_uri.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -1467,7 +1467,7 @@ public Builder setSubnetworkUri(
     }
     /**
      * 
-     * Optional. The Google Compute Engine subnetwork to be used for machine
+     * Optional. The Compute Engine subnetwork to be used for machine
      * communications. Cannot be specified with network_uri.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -1485,7 +1485,7 @@ public Builder clearSubnetworkUri() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine subnetwork to be used for machine
+     * Optional. The Compute Engine subnetwork to be used for machine
      * communications. Cannot be specified with network_uri.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -1564,8 +1564,8 @@ public Builder clearInternalIpOnly() {
     /**
      * 
      * Optional. The service account of the instances. Defaults to the default
-     * Google Compute Engine service account. Custom service accounts need
-     * permissions equivalent to the folloing IAM roles:
+     * Compute Engine service account. Custom service accounts need
+     * permissions equivalent to the following IAM roles:
      * * roles/logging.logWriter
      * * roles/storage.objectAdmin
      * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -1590,8 +1590,8 @@ public java.lang.String getServiceAccount() {
     /**
      * 
      * Optional. The service account of the instances. Defaults to the default
-     * Google Compute Engine service account. Custom service accounts need
-     * permissions equivalent to the folloing IAM roles:
+     * Compute Engine service account. Custom service accounts need
+     * permissions equivalent to the following IAM roles:
      * * roles/logging.logWriter
      * * roles/storage.objectAdmin
      * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -1617,8 +1617,8 @@ public java.lang.String getServiceAccount() {
     /**
      * 
      * Optional. The service account of the instances. Defaults to the default
-     * Google Compute Engine service account. Custom service accounts need
-     * permissions equivalent to the folloing IAM roles:
+     * Compute Engine service account. Custom service accounts need
+     * permissions equivalent to the following IAM roles:
      * * roles/logging.logWriter
      * * roles/storage.objectAdmin
      * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -1641,8 +1641,8 @@ public Builder setServiceAccount(
     /**
      * 
      * Optional. The service account of the instances. Defaults to the default
-     * Google Compute Engine service account. Custom service accounts need
-     * permissions equivalent to the folloing IAM roles:
+     * Compute Engine service account. Custom service accounts need
+     * permissions equivalent to the following IAM roles:
      * * roles/logging.logWriter
      * * roles/storage.objectAdmin
      * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -1661,8 +1661,8 @@ public Builder clearServiceAccount() {
     /**
      * 
      * Optional. The service account of the instances. Defaults to the default
-     * Google Compute Engine service account. Custom service accounts need
-     * permissions equivalent to the folloing IAM roles:
+     * Compute Engine service account. Custom service accounts need
+     * permissions equivalent to the following IAM roles:
      * * roles/logging.logWriter
      * * roles/storage.objectAdmin
      * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -1693,7 +1693,7 @@ private void ensureServiceAccountScopesIsMutable() {
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1714,7 +1714,7 @@ private void ensureServiceAccountScopesIsMutable() {
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1734,7 +1734,7 @@ public int getServiceAccountScopesCount() {
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1754,7 +1754,7 @@ public java.lang.String getServiceAccountScopes(int index) {
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1775,7 +1775,7 @@ public java.lang.String getServiceAccountScopes(int index) {
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1802,7 +1802,7 @@ public Builder setServiceAccountScopes(
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1829,7 +1829,7 @@ public Builder addServiceAccountScopes(
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1854,7 +1854,7 @@ public Builder addAllServiceAccountScopes(
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1877,7 +1877,7 @@ public Builder clearServiceAccountScopes() {
     }
     /**
      * 
-     * Optional. The URIs of service account scopes to be included in Google
+     * Optional. The URIs of service account scopes to be included in
      * Compute Engine instances. The following base set of scopes is always
      * included:
      * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -1913,7 +1913,7 @@ private void ensureTagsIsMutable() {
     }
     /**
      * 
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -1925,7 +1925,7 @@ private void ensureTagsIsMutable() { } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -1936,7 +1936,7 @@ public int getTagsCount() { } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -1947,7 +1947,7 @@ public java.lang.String getTags(int index) { } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -1959,7 +1959,7 @@ public java.lang.String getTags(int index) { } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -1977,7 +1977,7 @@ public Builder setTags( } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -1995,7 +1995,7 @@ public Builder addTags( } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -2011,7 +2011,7 @@ public Builder addAllTags( } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -2025,7 +2025,7 @@ public Builder clearTags() { } /** *
-     * The Google Compute Engine tags to add to all instances (see
+     * The Compute Engine tags to add to all instances (see
      * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
      * 
* @@ -2071,7 +2071,7 @@ public int getMetadataCount() { } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* @@ -2092,7 +2092,7 @@ public java.util.Map getMetadata() { } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* @@ -2104,7 +2104,7 @@ public java.util.Map getMetadataMap() { } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* @@ -2121,7 +2121,7 @@ public java.lang.String getMetadataOrDefault( } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* @@ -2146,7 +2146,7 @@ public Builder clearMetadata() { } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* @@ -2170,7 +2170,7 @@ public Builder removeMetadata( } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* @@ -2187,7 +2187,7 @@ public Builder putMetadata( } /** *
-     * The Google Compute Engine metadata entries to add to all instances (see
+     * The Compute Engine metadata entries to add to all instances (see
      * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
      * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfigOrBuilder.java index b8e5350f76af..426f606b7a5f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GceClusterConfigOrBuilder.java @@ -9,7 +9,7 @@ public interface GceClusterConfigOrBuilder extends /** *
-   * Optional. The zone where the Google Compute Engine cluster will be located.
+   * Optional. The zone where the Compute Engine cluster will be located.
    * On a create request, it is required in the "global" region. If omitted
    * in a non-global Cloud Dataproc region, the service will pick a zone in the
    * corresponding Compute Engine region. On a get request, zone will
@@ -25,7 +25,7 @@ public interface GceClusterConfigOrBuilder extends
   java.lang.String getZoneUri();
   /**
    * 
-   * Optional. The zone where the Google Compute Engine cluster will be located.
+   * Optional. The zone where the Compute Engine cluster will be located.
    * On a create request, it is required in the "global" region. If omitted
    * in a non-global Cloud Dataproc region, the service will pick a zone in the
    * corresponding Compute Engine region. On a get request, zone will
@@ -43,7 +43,7 @@ public interface GceClusterConfigOrBuilder extends
 
   /**
    * 
-   * Optional. The Google Compute Engine network to be used for machine
+   * Optional. The Compute Engine network to be used for machine
    * communications. Cannot be specified with subnetwork_uri. If neither
    * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
    * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -59,7 +59,7 @@ public interface GceClusterConfigOrBuilder extends
   java.lang.String getNetworkUri();
   /**
    * 
-   * Optional. The Google Compute Engine network to be used for machine
+   * Optional. The Compute Engine network to be used for machine
    * communications. Cannot be specified with subnetwork_uri. If neither
    * `network_uri` nor `subnetwork_uri` is specified, the "default" network of
    * the project is used, if it exists. Cannot be a "Custom Subnet Network" (see
@@ -77,7 +77,7 @@ public interface GceClusterConfigOrBuilder extends
 
   /**
    * 
-   * Optional. The Google Compute Engine subnetwork to be used for machine
+   * Optional. The Compute Engine subnetwork to be used for machine
    * communications. Cannot be specified with network_uri.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -90,7 +90,7 @@ public interface GceClusterConfigOrBuilder extends
   java.lang.String getSubnetworkUri();
   /**
    * 
-   * Optional. The Google Compute Engine subnetwork to be used for machine
+   * Optional. The Compute Engine subnetwork to be used for machine
    * communications. Cannot be specified with network_uri.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
@@ -120,8 +120,8 @@ public interface GceClusterConfigOrBuilder extends
   /**
    * 
    * Optional. The service account of the instances. Defaults to the default
-   * Google Compute Engine service account. Custom service accounts need
-   * permissions equivalent to the folloing IAM roles:
+   * Compute Engine service account. Custom service accounts need
+   * permissions equivalent to the following IAM roles:
    * * roles/logging.logWriter
    * * roles/storage.objectAdmin
    * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -135,8 +135,8 @@ public interface GceClusterConfigOrBuilder extends
   /**
    * 
    * Optional. The service account of the instances. Defaults to the default
-   * Google Compute Engine service account. Custom service accounts need
-   * permissions equivalent to the folloing IAM roles:
+   * Compute Engine service account. Custom service accounts need
+   * permissions equivalent to the following IAM roles:
    * * roles/logging.logWriter
    * * roles/storage.objectAdmin
    * (see https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts
@@ -151,7 +151,7 @@ public interface GceClusterConfigOrBuilder extends
 
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -170,7 +170,7 @@ public interface GceClusterConfigOrBuilder extends
       getServiceAccountScopesList();
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -188,7 +188,7 @@ public interface GceClusterConfigOrBuilder extends
   int getServiceAccountScopesCount();
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -206,7 +206,7 @@ public interface GceClusterConfigOrBuilder extends
   java.lang.String getServiceAccountScopes(int index);
   /**
    * 
-   * Optional. The URIs of service account scopes to be included in Google
+   * Optional. The URIs of service account scopes to be included in
    * Compute Engine instances. The following base set of scopes is always
    * included:
    * * https://www.googleapis.com/auth/cloud.useraccounts.readonly
@@ -226,7 +226,7 @@ public interface GceClusterConfigOrBuilder extends
 
   /**
    * 
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -236,7 +236,7 @@ public interface GceClusterConfigOrBuilder extends getTagsList(); /** *
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -245,7 +245,7 @@ public interface GceClusterConfigOrBuilder extends int getTagsCount(); /** *
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -254,7 +254,7 @@ public interface GceClusterConfigOrBuilder extends java.lang.String getTags(int index); /** *
-   * The Google Compute Engine tags to add to all instances (see
+   * The Compute Engine tags to add to all instances (see
    * [Tagging instances](/compute/docs/label-or-tag-resources#tags)).
    * 
* @@ -265,7 +265,7 @@ public interface GceClusterConfigOrBuilder extends /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -274,7 +274,7 @@ public interface GceClusterConfigOrBuilder extends int getMetadataCount(); /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -290,7 +290,7 @@ boolean containsMetadata( getMetadata(); /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -300,7 +300,7 @@ boolean containsMetadata( getMetadataMap(); /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* @@ -312,7 +312,7 @@ java.lang.String getMetadataOrDefault( java.lang.String defaultValue); /** *
-   * The Google Compute Engine metadata entries to add to all instances (see
+   * The Compute Engine metadata entries to add to all instances (see
    * [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
    * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequest.java new file mode 100644 index 000000000000..172b6fc3a7f3 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequest.java @@ -0,0 +1,673 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A request to fetch a workflow template.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.GetWorkflowTemplateRequest} + */ +public final class GetWorkflowTemplateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.GetWorkflowTemplateRequest) + GetWorkflowTemplateRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use GetWorkflowTemplateRequest.newBuilder() to construct. + private GetWorkflowTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetWorkflowTemplateRequest() { + name_ = ""; + version_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetWorkflowTemplateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + version_ = input.readInt32(); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VERSION_FIELD_NUMBER = 2; + private int version_; + /** + *
+   * Optional. The version of workflow template to retrieve. Only previously
+   * instantiated versions can be retrieved.
+   * If unspecified, retrieves the current version.
+   * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (version_ != 0) { + output.writeInt32(2, version_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, version_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && (getVersion() + == other.getVersion()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + 
getName().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to fetch a workflow template.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.GetWorkflowTemplateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.GetWorkflowTemplateRequest) + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + version_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest build() { + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest buildPartial() { + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest(this); + result.name_ = name_; + result.version_ = version_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest) { + return mergeFrom((com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest other) { + if (other == com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int version_ ; + /** + *
+     * Optional. The version of workflow template to retrieve. Only previously
+     * instantiated versions can be retrieved.
+     * If unspecified, retrieves the current version.
+     * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + /** + *
+     * Optional. The version of workflow template to retrieve. Only previously
+     * instantiated versions can be retrieved.
+     * If unspecified, retrieves the current version.
+     * 
+ * + * int32 version = 2; + */ + public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The version of workflow template to retrieve. Only previously
+     * instantiated versions can be retrieved.
+     * If unspecified, retrieves the current version.
+     * 
+ * + * int32 version = 2; + */ + public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.GetWorkflowTemplateRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GetWorkflowTemplateRequest) + private static final com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest(); + } + + public static com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetWorkflowTemplateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetWorkflowTemplateRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequestOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequestOrBuilder.java new file mode 100644 index 000000000000..de524cda3eeb --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/GetWorkflowTemplateRequestOrBuilder.java @@ -0,0 +1,42 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface GetWorkflowTemplateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.GetWorkflowTemplateRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Optional. The version of workflow template to retrieve. Only previously
+   * instantiated versions can be retrieved.
+   * If unspecified, retrieves the current version.
+   * 
+ * + * int32 version = 2; + */ + int getVersion(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfig.java index 111283dcf971..6cf3771b1b4b 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfig.java @@ -5,7 +5,7 @@ /** *
- * Optional. The config settings for Google Compute Engine resources in
+ * Optional. The config settings for Compute Engine resources in
  * an instance group, such as a master or worker group.
  * 
* @@ -176,9 +176,8 @@ public int getNumInstances() { private com.google.protobuf.LazyStringList instanceNames_; /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -189,9 +188,8 @@ public int getNumInstances() { } /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -201,9 +199,8 @@ public int getInstanceNamesCount() { } /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -213,9 +210,8 @@ public java.lang.String getInstanceNames(int index) { } /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -229,8 +225,9 @@ public java.lang.String getInstanceNames(int index) { private volatile java.lang.Object imageUri_; /** *
-   * Output-only. The Google Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -249,8 +246,9 @@ public java.lang.String getImageUri() { } /** *
-   * Output-only. The Google Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -273,11 +271,15 @@ public java.lang.String getImageUri() { private volatile java.lang.Object machineTypeUri_; /** *
-   * Optional. The Google Compute Engine machine type used for cluster instances.
+   * Optional. The Compute Engine machine type used for cluster instances.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `n1-standard-2`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the machine type
+   * resource, for example, `n1-standard-2`.
    * 
* * string machine_type_uri = 4; @@ -296,11 +298,15 @@ public java.lang.String getMachineTypeUri() { } /** *
-   * Optional. The Google Compute Engine machine type used for cluster instances.
+   * Optional. The Compute Engine machine type used for cluster instances.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `n1-standard-2`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the machine type
+   * resource, for example, `n1-standard-2`.
    * 
* * string machine_type_uri = 4; @@ -369,7 +375,7 @@ public boolean getIsPreemptible() { private com.google.cloud.dataproc.v1.ManagedGroupConfig managedGroupConfig_; /** *
-   * Output-only. The config for Google Compute Engine Instance Group
+   * Output only. The config for Compute Engine Instance Group
    * Manager that manages this group.
    * This is only used for preemptible instance groups.
    * 
@@ -381,7 +387,7 @@ public boolean hasManagedGroupConfig() { } /** *
-   * Output-only. The config for Google Compute Engine Instance Group
+   * Output only. The config for Compute Engine Instance Group
    * Manager that manages this group.
    * This is only used for preemptible instance groups.
    * 
@@ -393,7 +399,7 @@ public com.google.cloud.dataproc.v1.ManagedGroupConfig getManagedGroupConfig() { } /** *
-   * Output-only. The config for Google Compute Engine Instance Group
+   * Output only. The config for Compute Engine Instance Group
    * Manager that manages this group.
    * This is only used for preemptible instance groups.
    * 
@@ -408,7 +414,7 @@ public com.google.cloud.dataproc.v1.ManagedGroupConfigOrBuilder getManagedGroupC private java.util.List accelerators_; /** *
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -421,7 +427,7 @@ public java.util.List getAcceler
   }
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -435,7 +441,7 @@ public java.util.List getAcceler
   }
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -448,7 +454,7 @@ public int getAcceleratorsCount() {
   }
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -461,7 +467,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfig getAccelerators(int index)
   }
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -726,7 +732,7 @@ protected Builder newBuilderForType(
   }
   /**
    * 
-   * Optional. The config settings for Google Compute Engine resources in
+   * Optional. The config settings for Compute Engine resources in
    * an instance group, such as a master or worker group.
    * 
* @@ -1037,9 +1043,8 @@ private void ensureInstanceNamesIsMutable() { } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1050,9 +1055,8 @@ private void ensureInstanceNamesIsMutable() { } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1062,9 +1066,8 @@ public int getInstanceNamesCount() { } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1074,9 +1077,8 @@ public java.lang.String getInstanceNames(int index) { } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1087,9 +1089,8 @@ public java.lang.String getInstanceNames(int index) { } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1106,9 +1107,8 @@ public Builder setInstanceNames( } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1125,9 +1125,8 @@ public Builder addInstanceNames( } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1142,9 +1141,8 @@ public Builder addAllInstanceNames( } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1157,9 +1155,8 @@ public Builder clearInstanceNames() { } /** *
-     * Optional. The list of instance names. Cloud Dataproc derives the names from
-     * `cluster_name`, `num_instances`, and the instance group if not set by user
-     * (recommended practice is to let Cloud Dataproc derive the name).
+     * Output only. The list of instance names. Cloud Dataproc derives the names
+     * from `cluster_name`, `num_instances`, and the instance group.
      * 
* * repeated string instance_names = 2; @@ -1179,8 +1176,9 @@ public Builder addInstanceNamesBytes( private java.lang.Object imageUri_ = ""; /** *
-     * Output-only. The Google Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1199,8 +1197,9 @@ public java.lang.String getImageUri() { } /** *
-     * Output-only. The Google Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1220,8 +1219,9 @@ public java.lang.String getImageUri() { } /** *
-     * Output-only. The Google Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1238,8 +1238,9 @@ public Builder setImageUri( } /** *
-     * Output-only. The Google Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1252,8 +1253,9 @@ public Builder clearImageUri() { } /** *
-     * Output-only. The Google Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1273,11 +1275,15 @@ public Builder setImageUriBytes( private java.lang.Object machineTypeUri_ = ""; /** *
-     * Optional. The Google Compute Engine machine type used for cluster instances.
+     * Optional. The Compute Engine machine type used for cluster instances.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `n1-standard-2`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the machine type
+     * resource, for example, `n1-standard-2`.
      * 
* * string machine_type_uri = 4; @@ -1296,11 +1302,15 @@ public java.lang.String getMachineTypeUri() { } /** *
-     * Optional. The Google Compute Engine machine type used for cluster instances.
+     * Optional. The Compute Engine machine type used for cluster instances.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `n1-standard-2`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the machine type
+     * resource, for example, `n1-standard-2`.
      * 
* * string machine_type_uri = 4; @@ -1320,11 +1330,15 @@ public java.lang.String getMachineTypeUri() { } /** *
-     * Optional. The Google Compute Engine machine type used for cluster instances.
+     * Optional. The Compute Engine machine type used for cluster instances.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `n1-standard-2`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the machine type
+     * resource, for example, `n1-standard-2`.
      * 
* * string machine_type_uri = 4; @@ -1341,11 +1355,15 @@ public Builder setMachineTypeUri( } /** *
-     * Optional. The Google Compute Engine machine type used for cluster instances.
+     * Optional. The Compute Engine machine type used for cluster instances.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `n1-standard-2`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the machine type
+     * resource, for example, `n1-standard-2`.
      * 
* * string machine_type_uri = 4; @@ -1358,11 +1376,15 @@ public Builder clearMachineTypeUri() { } /** *
-     * Optional. The Google Compute Engine machine type used for cluster instances.
+     * Optional. The Compute Engine machine type used for cluster instances.
      * A full URL, partial URI, or short name are valid. Examples:
      * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
      * * `n1-standard-2`
+     * **Auto Zone Exception**: If you are using the Cloud Dataproc
+     * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+     * feature, you must use the short name of the machine type
+     * resource, for example, `n1-standard-2`.
      * 
* * string machine_type_uri = 4; @@ -1575,7 +1597,7 @@ public Builder clearIsPreemptible() { com.google.cloud.dataproc.v1.ManagedGroupConfig, com.google.cloud.dataproc.v1.ManagedGroupConfig.Builder, com.google.cloud.dataproc.v1.ManagedGroupConfigOrBuilder> managedGroupConfigBuilder_; /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1587,7 +1609,7 @@ public boolean hasManagedGroupConfig() { } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1603,7 +1625,7 @@ public com.google.cloud.dataproc.v1.ManagedGroupConfig getManagedGroupConfig() { } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1625,7 +1647,7 @@ public Builder setManagedGroupConfig(com.google.cloud.dataproc.v1.ManagedGroupCo } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1645,7 +1667,7 @@ public Builder setManagedGroupConfig( } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1669,7 +1691,7 @@ public Builder mergeManagedGroupConfig(com.google.cloud.dataproc.v1.ManagedGroup } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1689,7 +1711,7 @@ public Builder clearManagedGroupConfig() { } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1703,7 +1725,7 @@ public com.google.cloud.dataproc.v1.ManagedGroupConfig.Builder getManagedGroupCo } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1720,7 +1742,7 @@ public com.google.cloud.dataproc.v1.ManagedGroupConfigOrBuilder getManagedGroupC } /** *
-     * Output-only. The config for Google Compute Engine Instance Group
+     * Output only. The config for Compute Engine Instance Group
      * Manager that manages this group.
      * This is only used for preemptible instance groups.
      * 
@@ -1755,7 +1777,7 @@ private void ensureAcceleratorsIsMutable() { /** *
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1772,7 +1794,7 @@ public java.util.List getAcceler
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1789,7 +1811,7 @@ public int getAcceleratorsCount() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1806,7 +1828,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfig getAccelerators(int index)
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1830,7 +1852,7 @@ public Builder setAccelerators(
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1851,7 +1873,7 @@ public Builder setAccelerators(
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1874,7 +1896,7 @@ public Builder addAccelerators(com.google.cloud.dataproc.v1.AcceleratorConfig va
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1898,7 +1920,7 @@ public Builder addAccelerators(
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1919,7 +1941,7 @@ public Builder addAccelerators(
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1940,7 +1962,7 @@ public Builder addAccelerators(
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1962,7 +1984,7 @@ public Builder addAllAccelerators(
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -1982,7 +2004,7 @@ public Builder clearAccelerators() {
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -2002,7 +2024,7 @@ public Builder removeAccelerators(int index) {
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -2016,7 +2038,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfig.Builder getAcceleratorsBui
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -2033,7 +2055,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfigOrBuilder getAcceleratorsOr
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -2051,7 +2073,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfigOrBuilder getAcceleratorsOr
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -2065,7 +2087,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfig.Builder addAcceleratorsBui
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
@@ -2080,7 +2102,7 @@ public com.google.cloud.dataproc.v1.AcceleratorConfig.Builder addAcceleratorsBui
     }
     /**
      * 
-     * Optional. The Google Compute Engine accelerator configuration for these
+     * Optional. The Compute Engine accelerator configuration for these
      * instances.
      * **Beta Feature**: This feature is still under development. It may be
      * changed before final release.
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfigOrBuilder.java
index 602109d951a8..16477d918e75 100644
--- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfigOrBuilder.java
+++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceGroupConfigOrBuilder.java
@@ -19,9 +19,8 @@ public interface InstanceGroupConfigOrBuilder extends
 
   /**
    * 
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -30,9 +29,8 @@ public interface InstanceGroupConfigOrBuilder extends getInstanceNamesList(); /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -40,9 +38,8 @@ public interface InstanceGroupConfigOrBuilder extends int getInstanceNamesCount(); /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -50,9 +47,8 @@ public interface InstanceGroupConfigOrBuilder extends java.lang.String getInstanceNames(int index); /** *
-   * Optional. The list of instance names. Cloud Dataproc derives the names from
-   * `cluster_name`, `num_instances`, and the instance group if not set by user
-   * (recommended practice is to let Cloud Dataproc derive the name).
+   * Output only. The list of instance names. Cloud Dataproc derives the names
+   * from `cluster_name`, `num_instances`, and the instance group.
    * 
* * repeated string instance_names = 2; @@ -62,8 +58,9 @@ public interface InstanceGroupConfigOrBuilder extends /** *
-   * Output-only. The Google Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -71,8 +68,9 @@ public interface InstanceGroupConfigOrBuilder extends java.lang.String getImageUri(); /** *
-   * Output-only. The Google Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -82,11 +80,15 @@ public interface InstanceGroupConfigOrBuilder extends /** *
-   * Optional. The Google Compute Engine machine type used for cluster instances.
+   * Optional. The Compute Engine machine type used for cluster instances.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `n1-standard-2`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the machine type
+   * resource, for example, `n1-standard-2`.
    * 
* * string machine_type_uri = 4; @@ -94,11 +96,15 @@ public interface InstanceGroupConfigOrBuilder extends java.lang.String getMachineTypeUri(); /** *
-   * Optional. The Google Compute Engine machine type used for cluster instances.
+   * Optional. The Compute Engine machine type used for cluster instances.
    * A full URL, partial URI, or short name are valid. Examples:
    * * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`
    * * `n1-standard-2`
+   * **Auto Zone Exception**: If you are using the Cloud Dataproc
+   * [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement)
+   * feature, you must use the short name of the machine type
+   * resource, for example, `n1-standard-2`.
    * 
* * string machine_type_uri = 4; @@ -142,7 +148,7 @@ public interface InstanceGroupConfigOrBuilder extends /** *
-   * Output-only. The config for Google Compute Engine Instance Group
+   * Output only. The config for Compute Engine Instance Group
    * Manager that manages this group.
    * This is only used for preemptible instance groups.
    * 
@@ -152,7 +158,7 @@ public interface InstanceGroupConfigOrBuilder extends boolean hasManagedGroupConfig(); /** *
-   * Output-only. The config for Google Compute Engine Instance Group
+   * Output only. The config for Compute Engine Instance Group
    * Manager that manages this group.
    * This is only used for preemptible instance groups.
    * 
@@ -162,7 +168,7 @@ public interface InstanceGroupConfigOrBuilder extends com.google.cloud.dataproc.v1.ManagedGroupConfig getManagedGroupConfig(); /** *
-   * Output-only. The config for Google Compute Engine Instance Group
+   * Output only. The config for Compute Engine Instance Group
    * Manager that manages this group.
    * This is only used for preemptible instance groups.
    * 
@@ -173,7 +179,7 @@ public interface InstanceGroupConfigOrBuilder extends /** *
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -185,7 +191,7 @@ public interface InstanceGroupConfigOrBuilder extends
       getAcceleratorsList();
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -196,7 +202,7 @@ public interface InstanceGroupConfigOrBuilder extends
   com.google.cloud.dataproc.v1.AcceleratorConfig getAccelerators(int index);
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -207,7 +213,7 @@ public interface InstanceGroupConfigOrBuilder extends
   int getAcceleratorsCount();
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
@@ -219,7 +225,7 @@ public interface InstanceGroupConfigOrBuilder extends
       getAcceleratorsOrBuilderList();
   /**
    * 
-   * Optional. The Google Compute Engine accelerator configuration for these
+   * Optional. The Compute Engine accelerator configuration for these
    * instances.
    * **Beta Feature**: This feature is still under development. It may be
    * changed before final release.
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequest.java
new file mode 100644
index 000000000000..a43b35e0cc32
--- /dev/null
+++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequest.java
@@ -0,0 +1,1017 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/cloud/dataproc/v1/workflow_templates.proto
+
+package com.google.cloud.dataproc.v1;
+
+/**
+ * 
+ * A request to instantiate an inline workflow template.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest} + */ +public final class InstantiateInlineWorkflowTemplateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) + InstantiateInlineWorkflowTemplateRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use InstantiateInlineWorkflowTemplateRequest.newBuilder() to construct. + private InstantiateInlineWorkflowTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InstantiateInlineWorkflowTemplateRequest() { + parent_ = ""; + requestId_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private InstantiateInlineWorkflowTemplateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + parent_ = s; + break; + } + case 18: { + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder subBuilder = null; + if (template_ != null) { + subBuilder = template_.toBuilder(); + } + template_ = input.readMessage(com.google.cloud.dataproc.v1.WorkflowTemplate.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(template_); + template_ = subBuilder.buildPartial(); + } + + break; + } + case 26: { + java.lang.String s = 
input.readStringRequireUtf8(); + + requestId_ = s; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + private volatile java.lang.Object parent_; + /** + *
+   * Required. The "resource name" of the workflow template region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + *
+   * Required. The "resource name" of the workflow template region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + public com.google.protobuf.ByteString + getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TEMPLATE_FIELD_NUMBER = 2; + private com.google.cloud.dataproc.v1.WorkflowTemplate template_; + /** + *
+   * Required. The workflow template to instantiate.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public boolean hasTemplate() { + return template_ != null; + } + /** + *
+   * Required. The workflow template to instantiate.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate() { + return template_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } + /** + *
+   * Required. The workflow template to instantiate.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder() { + return getTemplate(); + } + + public static final int REQUEST_ID_FIELD_NUMBER = 3; + private volatile java.lang.Object requestId_; + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 3; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } + } + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 3; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getParentBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (template_ != null) { + output.writeMessage(2, getTemplate()); + } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getParentBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (template_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getTemplate()); + } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest)) { + return super.equals(obj); + } + 
com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) obj; + + boolean result = true; + result = result && getParent() + .equals(other.getParent()); + result = result && (hasTemplate() == other.hasTemplate()); + if (hasTemplate()) { + result = result && getTemplate() + .equals(other.getTemplate()); + } + result = result && getRequestId() + .equals(other.getRequestId()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasTemplate()) { + hash = (37 * hash) + TEMPLATE_FIELD_NUMBER; + hash = (53 * hash) + getTemplate().hashCode(); + } + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest 
parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to instantiate an inline workflow template.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + parent_ = ""; + + if (templateBuilder_ == null) { + template_ = null; + } else { + template_ = null; + templateBuilder_ = null; + } + requestId_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest build() { + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest buildPartial() { + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest(this); + result.parent_ = parent_; + if (templateBuilder_ == null) { + result.template_ = template_; + } else { + result.template_ = templateBuilder_.build(); + } + result.requestId_ = requestId_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) 
super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) { + return mergeFrom((com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest other) { + if (other == com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest.getDefaultInstance()) return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + onChanged(); + } + if (other.hasTemplate()) { + mergeTemplate(other.getTemplate()); + } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object parent_ = ""; + /** + *
+     * Required. The "resource name" of the workflow template region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public com.google.protobuf.ByteString + getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder setParent( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + parent_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder clearParent() { + + parent_ = getDefaultInstance().getParent(); + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder setParentBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + parent_ = value; + onChanged(); + return this; + } + + private com.google.cloud.dataproc.v1.WorkflowTemplate template_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> templateBuilder_; + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public boolean hasTemplate() { + return templateBuilder_ != null || template_ != null; + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate() { + if (templateBuilder_ == null) { + return template_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } else { + return templateBuilder_.getMessage(); + } + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder setTemplate(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + template_ = value; + onChanged(); + } else { + templateBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder setTemplate( + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder builderForValue) { + if (templateBuilder_ == null) { + template_ = builderForValue.build(); + onChanged(); + } else { + templateBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder mergeTemplate(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templateBuilder_ == null) { + if (template_ != null) { + template_ = + com.google.cloud.dataproc.v1.WorkflowTemplate.newBuilder(template_).mergeFrom(value).buildPartial(); + } else { + template_ = value; + } + onChanged(); + } else { + templateBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public Builder clearTemplate() { + if (templateBuilder_ == null) { + template_ = null; + onChanged(); + } else { + template_ = null; + templateBuilder_ = null; + } + + return this; + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate.Builder getTemplateBuilder() { + + onChanged(); + return getTemplateFieldBuilder().getBuilder(); + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder() { + if (templateBuilder_ != null) { + return templateBuilder_.getMessageOrBuilder(); + } else { + return template_ == null ? + com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } + } + /** + *
+     * Required. The workflow template to instantiate.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> + getTemplateFieldBuilder() { + if (templateBuilder_ == null) { + templateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder>( + getTemplate(), + getParentForChildren(), + isClean()); + template_ = null; + } + return templateBuilder_; + } + + private java.lang.Object requestId_ = ""; + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 3; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 3; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 3; + */ + public Builder setRequestId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + requestId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 3; + */ + public Builder clearRequestId() { + + requestId_ = getDefaultInstance().getRequestId(); + onChanged(); + return this; + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 3; + */ + public Builder setRequestIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + requestId_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) + private static final com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest(); + } + + public static com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InstantiateInlineWorkflowTemplateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InstantiateInlineWorkflowTemplateRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest getDefaultInstanceForType() { + 
return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequestOrBuilder.java new file mode 100644 index 000000000000..b28b314a088a --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateInlineWorkflowTemplateRequestOrBuilder.java @@ -0,0 +1,86 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface InstantiateInlineWorkflowTemplateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The "resource name" of the workflow template region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + java.lang.String getParent(); + /** + *
+   * Required. The "resource name" of the workflow template region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + com.google.protobuf.ByteString + getParentBytes(); + + /** + *
+   * Required. The workflow template to instantiate.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + boolean hasTemplate(); + /** + *
+   * Required. The workflow template to instantiate.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate(); + /** + *
+   * Required. The workflow template to instantiate.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 2; + */ + com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder(); + + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 3; + */ + java.lang.String getRequestId(); + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 3; + */ + com.google.protobuf.ByteString + getRequestIdBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequest.java new file mode 100644 index 000000000000..41540a9150cb --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequest.java @@ -0,0 +1,1211 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A request to instantiate a workflow template.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest} + */ +public final class InstantiateWorkflowTemplateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) + InstantiateWorkflowTemplateRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use InstantiateWorkflowTemplateRequest.newBuilder() to construct. + private InstantiateWorkflowTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InstantiateWorkflowTemplateRequest() { + name_ = ""; + version_ = 0; + requestId_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private InstantiateWorkflowTemplateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 16: { + + version_ = input.readInt32(); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + parameters_ = com.google.protobuf.MapField.newMapField( + ParametersDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000008; + } + com.google.protobuf.MapEntry + parameters__ = input.readMessage( + 
ParametersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + parameters_.getMutableMap().put( + parameters__.getKey(), parameters__.getValue()); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 6: + return internalGetParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VERSION_FIELD_NUMBER = 2; + private int version_; + /** + *
+   * Optional. The version of workflow template to instantiate. If specified,
+   * the workflow will be instantiated only if the current version of
+   * the workflow template has the supplied version.
+   * This option cannot be used to instantiate a previous version of
+   * workflow template.
+   * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + + public static final int REQUEST_ID_FIELD_NUMBER = 5; + private volatile java.lang.Object requestId_; + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } + } + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PARAMETERS_FIELD_NUMBER = 6; + private static final class ParametersDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> parameters_; + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + + public boolean containsParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetParameters().getMap().containsKey(key); + } + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + + public java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + + public java.lang.String getParametersOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (version_ != 0) { + output.writeInt32(2, version_); + } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, requestId_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetParameters(), + ParametersDefaultEntryHolder.defaultEntry, + 6); + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, version_); + } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, requestId_); + } + for (java.util.Map.Entry entry + : internalGetParameters().getMap().entrySet()) { + com.google.protobuf.MapEntry + parameters__ = ParametersDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + 
size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, parameters__); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && (getVersion() + == other.getVersion()); + result = result && getRequestId() + .equals(other.getRequestId()); + result = result && internalGetParameters().equals( + other.internalGetParameters()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); + if (!internalGetParameters().getMap().isEmpty()) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + internalGetParameters().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to instantiate a workflow template.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 6: + return internalGetParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 6: + return internalGetMutableParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + version_ = 0; + + requestId_ = ""; + + internalGetMutableParameters().clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest build() { + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest buildPartial() { + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + result.version_ = version_; + result.requestId_ = requestId_; + result.parameters_ = internalGetParameters(); + result.parameters_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + 
com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) { + return mergeFrom((com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest other) { + if (other == com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } + internalGetMutableParameters().mergeFrom( + other.internalGetParameters()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest parsedMessage = null; + 
try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the workflow template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int version_ ; + /** + *
+     * Optional. The version of workflow template to instantiate. If specified,
+     * the workflow will be instantiated only if the current version of
+     * the workflow template has the supplied version.
+     * This option cannot be used to instantiate a previous version of
+     * workflow template.
+     * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + /** + *
+     * Optional. The version of workflow template to instantiate. If specified,
+     * the workflow will be instantiated only if the current version of
+     * the workflow template has the supplied version.
+     * This option cannot be used to instantiate a previous version of
+     * workflow template.
+     * 
+ * + * int32 version = 2; + */ + public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The version of workflow template to instantiate. If specified,
+     * the workflow will be instantiated only if the current version of
+     * the workflow template has the supplied version.
+     * This option cannot be used to instantiate a previous version of
+     * workflow template.
+     * 
+ * + * int32 version = 2; + */ + public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + + private java.lang.Object requestId_ = ""; + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public Builder setRequestId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + requestId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public Builder clearRequestId() { + + requestId_ = getDefaultInstance().getRequestId(); + onChanged(); + return this; + } + /** + *
+     * Optional. A tag that prevents multiple concurrent workflow
+     * instances with the same tag from running. This mitigates risk of
+     * concurrent instances started due to retries.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 5; + */ + public Builder setRequestIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + requestId_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> parameters_; + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + private com.google.protobuf.MapField + internalGetMutableParameters() { + onChanged();; + if (parameters_ == null) { + parameters_ = com.google.protobuf.MapField.newMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + if (!parameters_.isMutable()) { + parameters_ = parameters_.copy(); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + + public boolean containsParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetParameters().getMap().containsKey(key); + } + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + + public java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + + public java.lang.String getParametersOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearParameters() { + internalGetMutableParameters().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + + public Builder removeParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableParameters().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableParameters() { + return internalGetMutableParameters().getMutableMap(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + public Builder putParameters( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableParameters().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 6; + */ + + public Builder putAllParameters( + java.util.Map values) { + internalGetMutableParameters().getMutableMap() + .putAll(values); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) + private static final com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest(); + } + + public static com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InstantiateWorkflowTemplateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InstantiateWorkflowTemplateRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequestOrBuilder.java new file mode 100644 index 000000000000..85c0d06fe87b --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstantiateWorkflowTemplateRequestOrBuilder.java @@ -0,0 +1,133 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface InstantiateWorkflowTemplateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Required. The "resource name" of the workflow template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Optional. The version of workflow template to instantiate. If specified,
+   * the workflow will be instantiated only if the current version of
+   * the workflow template has the supplied version.
+   * This option cannot be used to instantiate a previous version of
+   * workflow template.
+   * 
+ * + * int32 version = 2; + */ + int getVersion(); + + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + java.lang.String getRequestId(); + /** + *
+   * Optional. A tag that prevents multiple concurrent workflow
+   * instances with the same tag from running. This mitigates risk of
+   * concurrent instances started due to retries.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The tag must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 5; + */ + com.google.protobuf.ByteString + getRequestIdBytes(); + + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + int getParametersCount(); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + boolean containsParameters( + java.lang.String key); + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getParameters(); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + java.util.Map + getParametersMap(); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + + java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 6; + */ + + java.lang.String getParametersOrThrow( + java.lang.String key); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Job.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Job.java index 8bc9bbdbbeaa..4da1be352045 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Job.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/Job.java @@ -24,6 +24,7 @@ private Job() { yarnApplications_ = java.util.Collections.emptyList(); driverOutputResourceUri_ = ""; driverControlFilesUri_ = ""; + jobUuid_ = ""; } @java.lang.Override @@ -229,6 +230,12 @@ private Job( break; } + case 178: { + java.lang.String s = input.readStringRequireUtf8(); + + jobUuid_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -636,7 +643,7 @@ public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder private com.google.cloud.dataproc.v1.JobStatus status_; /** *
-   * Output-only. The job status. Additional application-specific
+   * Output only. The job status. Additional application-specific
    * status information may be contained in the <code>type_job</code>
    * and <code>yarn_applications</code> fields.
    * 
@@ -648,7 +655,7 @@ public boolean hasStatus() { } /** *
-   * Output-only. The job status. Additional application-specific
+   * Output only. The job status. Additional application-specific
    * status information may be contained in the <code>type_job</code>
    * and <code>yarn_applications</code> fields.
    * 
@@ -660,7 +667,7 @@ public com.google.cloud.dataproc.v1.JobStatus getStatus() { } /** *
-   * Output-only. The job status. Additional application-specific
+   * Output only. The job status. Additional application-specific
    * status information may be contained in the <code>type_job</code>
    * and <code>yarn_applications</code> fields.
    * 
@@ -675,7 +682,7 @@ public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusOrBuilder() { private java.util.List statusHistory_; /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -685,7 +692,7 @@ public java.util.List getStatusHistoryLi } /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -696,7 +703,7 @@ public java.util.List getStatusHistoryLi } /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -706,7 +713,7 @@ public int getStatusHistoryCount() { } /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -716,7 +723,7 @@ public com.google.cloud.dataproc.v1.JobStatus getStatusHistory(int index) { } /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -730,7 +737,7 @@ public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder private java.util.List yarnApplications_; /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -742,7 +749,7 @@ public java.util.List getYarnAppli } /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -755,7 +762,7 @@ public java.util.List getYarnAppli } /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -767,7 +774,7 @@ public int getYarnApplicationsCount() { } /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -779,7 +786,7 @@ public com.google.cloud.dataproc.v1.YarnApplication getYarnApplications(int inde } /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -795,7 +802,7 @@ public com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplications private volatile java.lang.Object driverOutputResourceUri_; /** *
-   * Output-only. A URI pointing to the location of the stdout of the job's
+   * Output only. A URI pointing to the location of the stdout of the job's
    * driver program.
    * 
* @@ -815,7 +822,7 @@ public java.lang.String getDriverOutputResourceUri() { } /** *
-   * Output-only. A URI pointing to the location of the stdout of the job's
+   * Output only. A URI pointing to the location of the stdout of the job's
    * driver program.
    * 
* @@ -839,7 +846,7 @@ public java.lang.String getDriverOutputResourceUri() { private volatile java.lang.Object driverControlFilesUri_; /** *
-   * Output-only. If present, the location of miscellaneous control files
+   * Output only. If present, the location of miscellaneous control files
    * which may be used as part of job setup and handling. If not present,
    * control files may be placed in the same location as `driver_output_uri`.
    * 
@@ -860,7 +867,7 @@ public java.lang.String getDriverControlFilesUri() { } /** *
-   * Output-only. If present, the location of miscellaneous control files
+   * Output only. If present, the location of miscellaneous control files
    * which may be used as part of job setup and handling. If not present,
    * control files may be placed in the same location as `driver_output_uri`.
    * 
@@ -1026,6 +1033,52 @@ public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilde return getScheduling(); } + public static final int JOB_UUID_FIELD_NUMBER = 22; + private volatile java.lang.Object jobUuid_; + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + public java.lang.String getJobUuid() { + java.lang.Object ref = jobUuid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobUuid_ = s; + return s; + } + } + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + public com.google.protobuf.ByteString + getJobUuidBytes() { + java.lang.Object ref = jobUuid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -1088,6 +1141,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (scheduling_ != null) { output.writeMessage(20, getScheduling()); } + if (!getJobUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 22, jobUuid_); + } unknownFields.writeTo(output); } @@ -1161,6 +1217,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(20, getScheduling()); } + if (!getJobUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(22, jobUuid_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1207,6 +1266,8 @@ public boolean equals(final java.lang.Object obj) { result = result && getScheduling() .equals(other.getScheduling()); } + result = result && getJobUuid() + .equals(other.getJobUuid()); result = result && getTypeJobCase().equals( other.getTypeJobCase()); if (!result) return false; @@ -1281,6 +1342,8 @@ public int hashCode() { hash = (37 * hash) + SCHEDULING_FIELD_NUMBER; hash = (53 * hash) + getScheduling().hashCode(); } + hash = (37 * hash) + JOB_UUID_FIELD_NUMBER; + hash = (53 * hash) + getJobUuid().hashCode(); switch (typeJobCase_) { case 3: hash = (37 * hash) + HADOOP_JOB_FIELD_NUMBER; @@ -1511,6 +1574,8 @@ public Builder clear() { scheduling_ = null; schedulingBuilder_ = null; } + jobUuid_ = ""; + typeJobCase_ = 0; typeJob_ = null; return this; @@ -1625,6 +1690,7 @@ public com.google.cloud.dataproc.v1.Job buildPartial() { } 
else { result.scheduling_ = schedulingBuilder_.build(); } + result.jobUuid_ = jobUuid_; result.bitField0_ = to_bitField0_; result.typeJobCase_ = typeJobCase_; onBuilt(); @@ -1749,6 +1815,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1.Job other) { if (other.hasScheduling()) { mergeScheduling(other.getScheduling()); } + if (!other.getJobUuid().isEmpty()) { + jobUuid_ = other.jobUuid_; + onChanged(); + } switch (other.getTypeJobCase()) { case HADOOP_JOB: { mergeHadoopJob(other.getHadoopJob()); @@ -3202,7 +3272,7 @@ public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder com.google.cloud.dataproc.v1.JobStatus, com.google.cloud.dataproc.v1.JobStatus.Builder, com.google.cloud.dataproc.v1.JobStatusOrBuilder> statusBuilder_; /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3214,7 +3284,7 @@ public boolean hasStatus() { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3230,7 +3300,7 @@ public com.google.cloud.dataproc.v1.JobStatus getStatus() { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3252,7 +3322,7 @@ public Builder setStatus(com.google.cloud.dataproc.v1.JobStatus value) { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3272,7 +3342,7 @@ public Builder setStatus( } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3296,7 +3366,7 @@ public Builder mergeStatus(com.google.cloud.dataproc.v1.JobStatus value) { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3316,7 +3386,7 @@ public Builder clearStatus() { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3330,7 +3400,7 @@ public com.google.cloud.dataproc.v1.JobStatus.Builder getStatusBuilder() { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3347,7 +3417,7 @@ public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusOrBuilder() { } /** *
-     * Output-only. The job status. Additional application-specific
+     * Output only. The job status. Additional application-specific
      * status information may be contained in the <code>type_job</code>
      * and <code>yarn_applications</code> fields.
      * 
@@ -3382,7 +3452,7 @@ private void ensureStatusHistoryIsMutable() { /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3396,7 +3466,7 @@ public java.util.List getStatusHistoryLi } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3410,7 +3480,7 @@ public int getStatusHistoryCount() { } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3424,7 +3494,7 @@ public com.google.cloud.dataproc.v1.JobStatus getStatusHistory(int index) { } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3445,7 +3515,7 @@ public Builder setStatusHistory( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3463,7 +3533,7 @@ public Builder setStatusHistory( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3483,7 +3553,7 @@ public Builder addStatusHistory(com.google.cloud.dataproc.v1.JobStatus value) { } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3504,7 +3574,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3522,7 +3592,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3540,7 +3610,7 @@ public Builder addStatusHistory( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3559,7 +3629,7 @@ public Builder addAllStatusHistory( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3576,7 +3646,7 @@ public Builder clearStatusHistory() { } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3593,7 +3663,7 @@ public Builder removeStatusHistory(int index) { } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3604,7 +3674,7 @@ public com.google.cloud.dataproc.v1.JobStatus.Builder getStatusHistoryBuilder( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3618,7 +3688,7 @@ public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3633,7 +3703,7 @@ public com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3644,7 +3714,7 @@ public com.google.cloud.dataproc.v1.JobStatus.Builder addStatusHistoryBuilder() } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3656,7 +3726,7 @@ public com.google.cloud.dataproc.v1.JobStatus.Builder addStatusHistoryBuilder( } /** *
-     * Output-only. The previous job status.
+     * Output only. The previous job status.
      * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -3694,7 +3764,7 @@ private void ensureYarnApplicationsIsMutable() { /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3710,7 +3780,7 @@ public java.util.List getYarnAppli } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3726,7 +3796,7 @@ public int getYarnApplicationsCount() { } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3742,7 +3812,7 @@ public com.google.cloud.dataproc.v1.YarnApplication getYarnApplications(int inde } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3765,7 +3835,7 @@ public Builder setYarnApplications( } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3785,7 +3855,7 @@ public Builder setYarnApplications( } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3807,7 +3877,7 @@ public Builder addYarnApplications(com.google.cloud.dataproc.v1.YarnApplication } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3830,7 +3900,7 @@ public Builder addYarnApplications( } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3850,7 +3920,7 @@ public Builder addYarnApplications( } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3870,7 +3940,7 @@ public Builder addYarnApplications( } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3891,7 +3961,7 @@ public Builder addAllYarnApplications( } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3910,7 +3980,7 @@ public Builder clearYarnApplications() { } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3929,7 +3999,7 @@ public Builder removeYarnApplications(int index) { } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3942,7 +4012,7 @@ public com.google.cloud.dataproc.v1.YarnApplication.Builder getYarnApplicationsB } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3958,7 +4028,7 @@ public com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplications } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3975,7 +4045,7 @@ public com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplications } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -3988,7 +4058,7 @@ public com.google.cloud.dataproc.v1.YarnApplication.Builder addYarnApplicationsB } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -4002,7 +4072,7 @@ public com.google.cloud.dataproc.v1.YarnApplication.Builder addYarnApplicationsB } /** *
-     * Output-only. The collection of YARN applications spun up by this job.
+     * Output only. The collection of YARN applications spun up by this job.
      * **Beta** Feature: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -4031,7 +4101,7 @@ public com.google.cloud.dataproc.v1.YarnApplication.Builder addYarnApplicationsB private java.lang.Object driverOutputResourceUri_ = ""; /** *
-     * Output-only. A URI pointing to the location of the stdout of the job's
+     * Output only. A URI pointing to the location of the stdout of the job's
      * driver program.
      * 
* @@ -4051,7 +4121,7 @@ public java.lang.String getDriverOutputResourceUri() { } /** *
-     * Output-only. A URI pointing to the location of the stdout of the job's
+     * Output only. A URI pointing to the location of the stdout of the job's
      * driver program.
      * 
* @@ -4072,7 +4142,7 @@ public java.lang.String getDriverOutputResourceUri() { } /** *
-     * Output-only. A URI pointing to the location of the stdout of the job's
+     * Output only. A URI pointing to the location of the stdout of the job's
      * driver program.
      * 
* @@ -4090,7 +4160,7 @@ public Builder setDriverOutputResourceUri( } /** *
-     * Output-only. A URI pointing to the location of the stdout of the job's
+     * Output only. A URI pointing to the location of the stdout of the job's
      * driver program.
      * 
* @@ -4104,7 +4174,7 @@ public Builder clearDriverOutputResourceUri() { } /** *
-     * Output-only. A URI pointing to the location of the stdout of the job's
+     * Output only. A URI pointing to the location of the stdout of the job's
      * driver program.
      * 
* @@ -4125,7 +4195,7 @@ public Builder setDriverOutputResourceUriBytes( private java.lang.Object driverControlFilesUri_ = ""; /** *
-     * Output-only. If present, the location of miscellaneous control files
+     * Output only. If present, the location of miscellaneous control files
      * which may be used as part of job setup and handling. If not present,
      * control files may be placed in the same location as `driver_output_uri`.
      * 
@@ -4146,7 +4216,7 @@ public java.lang.String getDriverControlFilesUri() { } /** *
-     * Output-only. If present, the location of miscellaneous control files
+     * Output only. If present, the location of miscellaneous control files
      * which may be used as part of job setup and handling. If not present,
      * control files may be placed in the same location as `driver_output_uri`.
      * 
@@ -4168,7 +4238,7 @@ public java.lang.String getDriverControlFilesUri() { } /** *
-     * Output-only. If present, the location of miscellaneous control files
+     * Output only. If present, the location of miscellaneous control files
      * which may be used as part of job setup and handling. If not present,
      * control files may be placed in the same location as `driver_output_uri`.
      * 
@@ -4187,7 +4257,7 @@ public Builder setDriverControlFilesUri( } /** *
-     * Output-only. If present, the location of miscellaneous control files
+     * Output only. If present, the location of miscellaneous control files
      * which may be used as part of job setup and handling. If not present,
      * control files may be placed in the same location as `driver_output_uri`.
      * 
@@ -4202,7 +4272,7 @@ public Builder clearDriverControlFilesUri() { } /** *
-     * Output-only. If present, the location of miscellaneous control files
+     * Output only. If present, the location of miscellaneous control files
      * which may be used as part of job setup and handling. If not present,
      * control files may be placed in the same location as `driver_output_uri`.
      * 
@@ -4559,6 +4629,105 @@ public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilde } return schedulingBuilder_; } + + private java.lang.Object jobUuid_ = ""; + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public java.lang.String getJobUuid() { + java.lang.Object ref = jobUuid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobUuid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public com.google.protobuf.ByteString + getJobUuidBytes() { + java.lang.Object ref = jobUuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public Builder setJobUuid( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + jobUuid_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public Builder clearJobUuid() { + + jobUuid_ = getDefaultInstance().getJobUuid(); + onChanged(); + return this; + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public Builder setJobUuidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + jobUuid_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobOrBuilder.java index 7c4fbdbdf886..770d41fc3380 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobOrBuilder.java @@ -221,7 +221,7 @@ public interface JobOrBuilder extends /** *
-   * Output-only. The job status. Additional application-specific
+   * Output only. The job status. Additional application-specific
    * status information may be contained in the <code>type_job</code>
    * and <code>yarn_applications</code> fields.
    * 
@@ -231,7 +231,7 @@ public interface JobOrBuilder extends boolean hasStatus(); /** *
-   * Output-only. The job status. Additional application-specific
+   * Output only. The job status. Additional application-specific
    * status information may be contained in the <code>type_job</code>
    * and <code>yarn_applications</code> fields.
    * 
@@ -241,7 +241,7 @@ public interface JobOrBuilder extends com.google.cloud.dataproc.v1.JobStatus getStatus(); /** *
-   * Output-only. The job status. Additional application-specific
+   * Output only. The job status. Additional application-specific
    * status information may be contained in the <code>type_job</code>
    * and <code>yarn_applications</code> fields.
    * 
@@ -252,7 +252,7 @@ public interface JobOrBuilder extends /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -261,7 +261,7 @@ public interface JobOrBuilder extends getStatusHistoryList(); /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -269,7 +269,7 @@ public interface JobOrBuilder extends com.google.cloud.dataproc.v1.JobStatus getStatusHistory(int index); /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -277,7 +277,7 @@ public interface JobOrBuilder extends int getStatusHistoryCount(); /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -286,7 +286,7 @@ public interface JobOrBuilder extends getStatusHistoryOrBuilderList(); /** *
-   * Output-only. The previous job status.
+   * Output only. The previous job status.
    * 
* * repeated .google.cloud.dataproc.v1.JobStatus status_history = 13; @@ -296,7 +296,7 @@ com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder( /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -307,7 +307,7 @@ com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder( getYarnApplicationsList(); /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -317,7 +317,7 @@ com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder( com.google.cloud.dataproc.v1.YarnApplication getYarnApplications(int index); /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -327,7 +327,7 @@ com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder( int getYarnApplicationsCount(); /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -338,7 +338,7 @@ com.google.cloud.dataproc.v1.JobStatusOrBuilder getStatusHistoryOrBuilder( getYarnApplicationsOrBuilderList(); /** *
-   * Output-only. The collection of YARN applications spun up by this job.
+   * Output only. The collection of YARN applications spun up by this job.
    * **Beta** Feature: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -350,7 +350,7 @@ com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplicationsOrBuild /** *
-   * Output-only. A URI pointing to the location of the stdout of the job's
+   * Output only. A URI pointing to the location of the stdout of the job's
    * driver program.
    * 
* @@ -359,7 +359,7 @@ com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplicationsOrBuild java.lang.String getDriverOutputResourceUri(); /** *
-   * Output-only. A URI pointing to the location of the stdout of the job's
+   * Output only. A URI pointing to the location of the stdout of the job's
    * driver program.
    * 
* @@ -370,7 +370,7 @@ com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplicationsOrBuild /** *
-   * Output-only. If present, the location of miscellaneous control files
+   * Output only. If present, the location of miscellaneous control files
    * which may be used as part of job setup and handling. If not present,
    * control files may be placed in the same location as `driver_output_uri`.
    * 
@@ -380,7 +380,7 @@ com.google.cloud.dataproc.v1.YarnApplicationOrBuilder getYarnApplicationsOrBuild java.lang.String getDriverControlFilesUri(); /** *
-   * Output-only. If present, the location of miscellaneous control files
+   * Output only. If present, the location of miscellaneous control files
    * which may be used as part of job setup and handling. If not present,
    * control files may be placed in the same location as `driver_output_uri`.
    * 
@@ -494,5 +494,27 @@ java.lang.String getLabelsOrThrow( */ com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder(); + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + java.lang.String getJobUuid(); + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + com.google.protobuf.ByteString + getJobUuidBytes(); + public com.google.cloud.dataproc.v1.Job.TypeJobCase getTypeJobCase(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacement.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacement.java index 8e65860185a5..20d4b2ed39c9 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacement.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacement.java @@ -138,7 +138,7 @@ public java.lang.String getClusterName() { private volatile java.lang.Object clusterUuid_; /** *
-   * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+   * Output only. A cluster UUID generated by the Cloud Dataproc service when
    * the job is submitted.
    * 
* @@ -158,7 +158,7 @@ public java.lang.String getClusterUuid() { } /** *
-   * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+   * Output only. A cluster UUID generated by the Cloud Dataproc service when
    * the job is submitted.
    * 
* @@ -594,7 +594,7 @@ public Builder setClusterNameBytes( private java.lang.Object clusterUuid_ = ""; /** *
-     * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+     * Output only. A cluster UUID generated by the Cloud Dataproc service when
      * the job is submitted.
      * 
* @@ -614,7 +614,7 @@ public java.lang.String getClusterUuid() { } /** *
-     * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+     * Output only. A cluster UUID generated by the Cloud Dataproc service when
      * the job is submitted.
      * 
* @@ -635,7 +635,7 @@ public java.lang.String getClusterUuid() { } /** *
-     * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+     * Output only. A cluster UUID generated by the Cloud Dataproc service when
      * the job is submitted.
      * 
* @@ -653,7 +653,7 @@ public Builder setClusterUuid( } /** *
-     * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+     * Output only. A cluster UUID generated by the Cloud Dataproc service when
      * the job is submitted.
      * 
* @@ -667,7 +667,7 @@ public Builder clearClusterUuid() { } /** *
-     * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+     * Output only. A cluster UUID generated by the Cloud Dataproc service when
      * the job is submitted.
      * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacementOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacementOrBuilder.java index c03d36027e6a..abf9f41392de 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacementOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobPlacementOrBuilder.java @@ -27,7 +27,7 @@ public interface JobPlacementOrBuilder extends /** *
-   * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+   * Output only. A cluster UUID generated by the Cloud Dataproc service when
    * the job is submitted.
    * 
* @@ -36,7 +36,7 @@ public interface JobPlacementOrBuilder extends java.lang.String getClusterUuid(); /** *
-   * Output-only. A cluster UUID generated by the Cloud Dataproc service when
+   * Output only. A cluster UUID generated by the Cloud Dataproc service when
    * the job is submitted.
    * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobScheduling.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobScheduling.java index 74d3288db7bc..4c53387cac61 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobScheduling.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobScheduling.java @@ -6,8 +6,6 @@ /** *
  * Job scheduling options.
- * **Beta Feature**: These options are available for testing purposes only.
- * They may be changed before final release.
  * 
* * Protobuf type {@code google.cloud.dataproc.v1.JobScheduling} @@ -263,8 +261,6 @@ protected Builder newBuilderForType( /** *
    * Job scheduling options.
-   * **Beta Feature**: These options are available for testing purposes only.
-   * They may be changed before final release.
    * 
* * Protobuf type {@code google.cloud.dataproc.v1.JobScheduling} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatus.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatus.java index fecbf7b65b48..81bf6f364529 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatus.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatus.java @@ -375,11 +375,19 @@ private State(int value) { } /** + *
+   * The job substate.
+   * 
+ * * Protobuf enum {@code google.cloud.dataproc.v1.JobStatus.Substate} */ public enum Substate implements com.google.protobuf.ProtocolMessageEnum { /** + *
+     * The job substate is unknown.
+     * 
+ * * UNSPECIFIED = 0; */ UNSPECIFIED(0), @@ -418,6 +426,10 @@ public enum Substate ; /** + *
+     * The job substate is unknown.
+     * 
+ * * UNSPECIFIED = 0; */ public static final int UNSPECIFIED_VALUE = 0; @@ -532,7 +544,7 @@ private Substate(int value) { private int state_; /** *
-   * Output-only. A state message specifying the overall job state.
+   * Output only. A state message specifying the overall job state.
    * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -542,7 +554,7 @@ public int getStateValue() { } /** *
-   * Output-only. A state message specifying the overall job state.
+   * Output only. A state message specifying the overall job state.
    * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -557,7 +569,7 @@ public com.google.cloud.dataproc.v1.JobStatus.State getState() { private volatile java.lang.Object details_; /** *
-   * Output-only. Optional job state details, such as an error
+   * Output only. Optional job state details, such as an error
    * description if the state is <code>ERROR</code>.
    * 
* @@ -577,7 +589,7 @@ public java.lang.String getDetails() { } /** *
-   * Output-only. Optional job state details, such as an error
+   * Output only. Optional job state details, such as an error
    * description if the state is <code>ERROR</code>.
    * 
* @@ -601,7 +613,7 @@ public java.lang.String getDetails() { private com.google.protobuf.Timestamp stateStartTime_; /** *
-   * Output-only. The time when this state was entered.
+   * Output only. The time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -611,7 +623,7 @@ public boolean hasStateStartTime() { } /** *
-   * Output-only. The time when this state was entered.
+   * Output only. The time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -621,7 +633,7 @@ public com.google.protobuf.Timestamp getStateStartTime() { } /** *
-   * Output-only. The time when this state was entered.
+   * Output only. The time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -634,7 +646,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { private int substate_; /** *
-   * Output-only. Additional state information, which includes
+   * Output only. Additional state information, which includes
    * status reported by the agent.
    * 
* @@ -645,7 +657,7 @@ public int getSubstateValue() { } /** *
-   * Output-only. Additional state information, which includes
+   * Output only. Additional state information, which includes
    * status reported by the agent.
    * 
* @@ -1029,7 +1041,7 @@ public Builder mergeFrom( private int state_ = 0; /** *
-     * Output-only. A state message specifying the overall job state.
+     * Output only. A state message specifying the overall job state.
      * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -1039,7 +1051,7 @@ public int getStateValue() { } /** *
-     * Output-only. A state message specifying the overall job state.
+     * Output only. A state message specifying the overall job state.
      * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -1051,7 +1063,7 @@ public Builder setStateValue(int value) { } /** *
-     * Output-only. A state message specifying the overall job state.
+     * Output only. A state message specifying the overall job state.
      * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -1063,7 +1075,7 @@ public com.google.cloud.dataproc.v1.JobStatus.State getState() { } /** *
-     * Output-only. A state message specifying the overall job state.
+     * Output only. A state message specifying the overall job state.
      * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -1079,7 +1091,7 @@ public Builder setState(com.google.cloud.dataproc.v1.JobStatus.State value) { } /** *
-     * Output-only. A state message specifying the overall job state.
+     * Output only. A state message specifying the overall job state.
      * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -1094,7 +1106,7 @@ public Builder clearState() { private java.lang.Object details_ = ""; /** *
-     * Output-only. Optional job state details, such as an error
+     * Output only. Optional job state details, such as an error
      * description if the state is <code>ERROR</code>.
      * 
* @@ -1114,7 +1126,7 @@ public java.lang.String getDetails() { } /** *
-     * Output-only. Optional job state details, such as an error
+     * Output only. Optional job state details, such as an error
      * description if the state is <code>ERROR</code>.
      * 
* @@ -1135,7 +1147,7 @@ public java.lang.String getDetails() { } /** *
-     * Output-only. Optional job state details, such as an error
+     * Output only. Optional job state details, such as an error
      * description if the state is <code>ERROR</code>.
      * 
* @@ -1153,7 +1165,7 @@ public Builder setDetails( } /** *
-     * Output-only. Optional job state details, such as an error
+     * Output only. Optional job state details, such as an error
      * description if the state is <code>ERROR</code>.
      * 
* @@ -1167,7 +1179,7 @@ public Builder clearDetails() { } /** *
-     * Output-only. Optional job state details, such as an error
+     * Output only. Optional job state details, such as an error
      * description if the state is <code>ERROR</code>.
      * 
* @@ -1190,7 +1202,7 @@ public Builder setDetailsBytes( com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> stateStartTimeBuilder_; /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1200,7 +1212,7 @@ public boolean hasStateStartTime() { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1214,7 +1226,7 @@ public com.google.protobuf.Timestamp getStateStartTime() { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1234,7 +1246,7 @@ public Builder setStateStartTime(com.google.protobuf.Timestamp value) { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1252,7 +1264,7 @@ public Builder setStateStartTime( } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1274,7 +1286,7 @@ public Builder mergeStateStartTime(com.google.protobuf.Timestamp value) { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1292,7 +1304,7 @@ public Builder clearStateStartTime() { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1304,7 +1316,7 @@ public com.google.protobuf.Timestamp.Builder getStateStartTimeBuilder() { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1319,7 +1331,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { } /** *
-     * Output-only. The time when this state was entered.
+     * Output only. The time when this state was entered.
      * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -1341,7 +1353,7 @@ public com.google.protobuf.TimestampOrBuilder getStateStartTimeOrBuilder() { private int substate_ = 0; /** *
-     * Output-only. Additional state information, which includes
+     * Output only. Additional state information, which includes
      * status reported by the agent.
      * 
* @@ -1352,7 +1364,7 @@ public int getSubstateValue() { } /** *
-     * Output-only. Additional state information, which includes
+     * Output only. Additional state information, which includes
      * status reported by the agent.
      * 
* @@ -1365,7 +1377,7 @@ public Builder setSubstateValue(int value) { } /** *
-     * Output-only. Additional state information, which includes
+     * Output only. Additional state information, which includes
      * status reported by the agent.
      * 
* @@ -1378,7 +1390,7 @@ public com.google.cloud.dataproc.v1.JobStatus.Substate getSubstate() { } /** *
-     * Output-only. Additional state information, which includes
+     * Output only. Additional state information, which includes
      * status reported by the agent.
      * 
* @@ -1395,7 +1407,7 @@ public Builder setSubstate(com.google.cloud.dataproc.v1.JobStatus.Substate value } /** *
-     * Output-only. Additional state information, which includes
+     * Output only. Additional state information, which includes
      * status reported by the agent.
      * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatusOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatusOrBuilder.java index 16874f77be46..b7524fc8f37f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatusOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobStatusOrBuilder.java @@ -9,7 +9,7 @@ public interface JobStatusOrBuilder extends /** *
-   * Output-only. A state message specifying the overall job state.
+   * Output only. A state message specifying the overall job state.
    * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -17,7 +17,7 @@ public interface JobStatusOrBuilder extends int getStateValue(); /** *
-   * Output-only. A state message specifying the overall job state.
+   * Output only. A state message specifying the overall job state.
    * 
* * .google.cloud.dataproc.v1.JobStatus.State state = 1; @@ -26,7 +26,7 @@ public interface JobStatusOrBuilder extends /** *
-   * Output-only. Optional job state details, such as an error
+   * Output only. Optional job state details, such as an error
    * description if the state is <code>ERROR</code>.
    * 
* @@ -35,7 +35,7 @@ public interface JobStatusOrBuilder extends java.lang.String getDetails(); /** *
-   * Output-only. Optional job state details, such as an error
+   * Output only. Optional job state details, such as an error
    * description if the state is <code>ERROR</code>.
    * 
* @@ -46,7 +46,7 @@ public interface JobStatusOrBuilder extends /** *
-   * Output-only. The time when this state was entered.
+   * Output only. The time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -54,7 +54,7 @@ public interface JobStatusOrBuilder extends boolean hasStateStartTime(); /** *
-   * Output-only. The time when this state was entered.
+   * Output only. The time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -62,7 +62,7 @@ public interface JobStatusOrBuilder extends com.google.protobuf.Timestamp getStateStartTime(); /** *
-   * Output-only. The time when this state was entered.
+   * Output only. The time when this state was entered.
    * 
* * .google.protobuf.Timestamp state_start_time = 6; @@ -71,7 +71,7 @@ public interface JobStatusOrBuilder extends /** *
-   * Output-only. Additional state information, which includes
+   * Output only. Additional state information, which includes
    * status reported by the agent.
    * 
* @@ -80,7 +80,7 @@ public interface JobStatusOrBuilder extends int getSubstateValue(); /** *
-   * Output-only. Additional state information, which includes
+   * Output only. Additional state information, which includes
    * status reported by the agent.
    * 
* diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobsProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobsProto.java index 13e14855d5ed..4a866a333f89 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobsProto.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/JobsProto.java @@ -277,7 +277,7 @@ public static void registerAllExtensions( "\n\005State\022\025\n\021STATE_UNSPECIFIED\020\000\022\007\n\003NEW\020\001\022" + "\016\n\nNEW_SAVING\020\002\022\r\n\tSUBMITTED\020\003\022\014\n\010ACCEPT" + "ED\020\004\022\013\n\007RUNNING\020\005\022\014\n\010FINISHED\020\006\022\n\n\006FAILE" + - "D\020\007\022\n\n\006KILLED\020\010\"\211\007\n\003Job\0229\n\treference\030\001 \001" + + "D\020\007\022\n\n\006KILLED\020\010\"\233\007\n\003Job\0229\n\treference\030\001 \001" + "(\0132&.google.cloud.dataproc.v1.JobReferen" + "ce\0229\n\tplacement\030\002 \001(\0132&.google.cloud.dat" + "aproc.v1.JobPlacement\0229\n\nhadoop_job\030\003 \001(" + @@ -298,58 +298,59 @@ public static void registerAllExtensions( "_control_files_uri\030\017 \001(\t\0229\n\006labels\030\022 \003(\013" + "2).google.cloud.dataproc.v1.Job.LabelsEn" + "try\022;\n\nscheduling\030\024 \001(\0132\'.google.cloud.d" + - "ataproc.v1.JobScheduling\032-\n\013LabelsEntry\022" + - "\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\n\n\010type_" + - "job\".\n\rJobScheduling\022\035\n\025max_failures_per" + - "_hour\030\001 \001(\005\"b\n\020SubmitJobRequest\022\022\n\nproje" + - "ct_id\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022*\n\003job\030\002 \001(\013" + - "2\035.google.cloud.dataproc.v1.Job\"C\n\rGetJo" + - "bRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003" + - " \001(\t\022\016\n\006job_id\030\002 \001(\t\"\220\002\n\017ListJobsRequest" + - 
"\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\006 \001(\t\022\021\n\t" + - "page_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\024\n\014c" + - "luster_name\030\004 \001(\t\022T\n\021job_state_matcher\030\005" + - " \001(\01629.google.cloud.dataproc.v1.ListJobs" + - "Request.JobStateMatcher\022\016\n\006filter\030\007 \001(\t\"" + - "6\n\017JobStateMatcher\022\007\n\003ALL\020\000\022\n\n\006ACTIVE\020\001\022" + - "\016\n\nNON_ACTIVE\020\002\"\243\001\n\020UpdateJobRequest\022\022\n\n" + - "project_id\030\001 \001(\t\022\016\n\006region\030\002 \001(\t\022\016\n\006job_" + - "id\030\003 \001(\t\022*\n\003job\030\004 \001(\0132\035.google.cloud.dat" + - "aproc.v1.Job\022/\n\013update_mask\030\005 \001(\0132\032.goog" + - "le.protobuf.FieldMask\"X\n\020ListJobsRespons" + - "e\022+\n\004jobs\030\001 \003(\0132\035.google.cloud.dataproc." + - "v1.Job\022\027\n\017next_page_token\030\002 \001(\t\"F\n\020Cance" + - "lJobRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006regio" + - "n\030\003 \001(\t\022\016\n\006job_id\030\002 \001(\t\"F\n\020DeleteJobRequ" + - "est\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022" + - "\016\n\006job_id\030\002 \001(\t2\266\007\n\rJobController\022\231\001\n\tSu" + - "bmitJob\022*.google.cloud.dataproc.v1.Submi" + - "tJobRequest\032\035.google.cloud.dataproc.v1.J" + - "ob\"A\202\323\344\223\002;\"6/v1/projects/{project_id}/re" + - "gions/{region}/jobs:submit:\001*\022\222\001\n\006GetJob" + - "\022\'.google.cloud.dataproc.v1.GetJobReques" + - "t\032\035.google.cloud.dataproc.v1.Job\"@\202\323\344\223\002:" + - "\0228/v1/projects/{project_id}/regions/{reg" + - "ion}/jobs/{job_id}\022\232\001\n\010ListJobs\022).google" + - ".cloud.dataproc.v1.ListJobsRequest\032*.goo" + - "gle.cloud.dataproc.v1.ListJobsResponse\"7" + - "\202\323\344\223\0021\022//v1/projects/{project_id}/region" + - "s/{region}/jobs\022\235\001\n\tUpdateJob\022*.google.c" + - 
"loud.dataproc.v1.UpdateJobRequest\032\035.goog" + - "le.cloud.dataproc.v1.Job\"E\202\323\344\223\002?28/v1/pr" + - "ojects/{project_id}/regions/{region}/job" + - "s/{job_id}:\003job\022\242\001\n\tCancelJob\022*.google.c" + - "loud.dataproc.v1.CancelJobRequest\032\035.goog" + - "le.cloud.dataproc.v1.Job\"J\202\323\344\223\002D\"?/v1/pr" + - "ojects/{project_id}/regions/{region}/job" + - "s/{job_id}:cancel:\001*\022\221\001\n\tDeleteJob\022*.goo" + - "gle.cloud.dataproc.v1.DeleteJobRequest\032\026" + - ".google.protobuf.Empty\"@\202\323\344\223\002:*8/v1/proj" + + "ataproc.v1.JobScheduling\022\020\n\010job_uuid\030\026 \001" + + "(\t\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030" + + "\002 \001(\t:\0028\001B\n\n\010type_job\".\n\rJobScheduling\022\035" + + "\n\025max_failures_per_hour\030\001 \001(\005\"v\n\020SubmitJ" + + "obRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030" + + "\003 \001(\t\022*\n\003job\030\002 \001(\0132\035.google.cloud.datapr" + + "oc.v1.Job\022\022\n\nrequest_id\030\004 \001(\t\"C\n\rGetJobR" + + "equest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001" + + "(\t\022\016\n\006job_id\030\002 \001(\t\"\220\002\n\017ListJobsRequest\022\022" + + "\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\006 \001(\t\022\021\n\tpa" + + "ge_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\024\n\014clu" + + "ster_name\030\004 \001(\t\022T\n\021job_state_matcher\030\005 \001" + + "(\01629.google.cloud.dataproc.v1.ListJobsRe" + + "quest.JobStateMatcher\022\016\n\006filter\030\007 \001(\t\"6\n" + + "\017JobStateMatcher\022\007\n\003ALL\020\000\022\n\n\006ACTIVE\020\001\022\016\n" + + "\nNON_ACTIVE\020\002\"\243\001\n\020UpdateJobRequest\022\022\n\npr" + + "oject_id\030\001 \001(\t\022\016\n\006region\030\002 \001(\t\022\016\n\006job_id" + + "\030\003 \001(\t\022*\n\003job\030\004 \001(\0132\035.google.cloud.datap" + + "roc.v1.Job\022/\n\013update_mask\030\005 
\001(\0132\032.google" + + ".protobuf.FieldMask\"X\n\020ListJobsResponse\022" + + "+\n\004jobs\030\001 \003(\0132\035.google.cloud.dataproc.v1" + + ".Job\022\027\n\017next_page_token\030\002 \001(\t\"F\n\020CancelJ" + + "obRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030" + + "\003 \001(\t\022\016\n\006job_id\030\002 \001(\t\"F\n\020DeleteJobReques" + + "t\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001(\t\022\016\n" + + "\006job_id\030\002 \001(\t2\266\007\n\rJobController\022\231\001\n\tSubm" + + "itJob\022*.google.cloud.dataproc.v1.SubmitJ" + + "obRequest\032\035.google.cloud.dataproc.v1.Job" + + "\"A\202\323\344\223\002;\"6/v1/projects/{project_id}/regi" + + "ons/{region}/jobs:submit:\001*\022\222\001\n\006GetJob\022\'" + + ".google.cloud.dataproc.v1.GetJobRequest\032" + + "\035.google.cloud.dataproc.v1.Job\"@\202\323\344\223\002:\0228" + + "/v1/projects/{project_id}/regions/{regio" + + "n}/jobs/{job_id}\022\232\001\n\010ListJobs\022).google.c" + + "loud.dataproc.v1.ListJobsRequest\032*.googl" + + "e.cloud.dataproc.v1.ListJobsResponse\"7\202\323" + + "\344\223\0021\022//v1/projects/{project_id}/regions/" + + "{region}/jobs\022\235\001\n\tUpdateJob\022*.google.clo" + + "ud.dataproc.v1.UpdateJobRequest\032\035.google" + + ".cloud.dataproc.v1.Job\"E\202\323\344\223\002?28/v1/proj" + "ects/{project_id}/regions/{region}/jobs/" + - "{job_id}Bm\n\034com.google.cloud.dataproc.v1" + - "B\tJobsProtoP\001Z@google.golang.org/genprot" + - "o/googleapis/cloud/dataproc/v1;dataprocb" + - "\006proto3" + "{job_id}:\003job\022\242\001\n\tCancelJob\022*.google.clo" + + "ud.dataproc.v1.CancelJobRequest\032\035.google" + + ".cloud.dataproc.v1.Job\"J\202\323\344\223\002D\"?/v1/proj" + + "ects/{project_id}/regions/{region}/jobs/" + + "{job_id}:cancel:\001*\022\221\001\n\tDeleteJob\022*.googl" + + "e.cloud.dataproc.v1.DeleteJobRequest\032\026.g" + + "oogle.protobuf.Empty\"@\202\323\344\223\002:*8/v1/projec" + + 
"ts/{project_id}/regions/{region}/jobs/{j" + + "ob_id}Bm\n\034com.google.cloud.dataproc.v1B\t" + + "JobsProtoP\001Z@google.golang.org/genproto/" + + "googleapis/cloud/dataproc/v1;dataprocb\006p" + + "roto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -504,7 +505,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1_Job_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_Job_descriptor, - new java.lang.String[] { "Reference", "Placement", "HadoopJob", "SparkJob", "PysparkJob", "HiveJob", "PigJob", "SparkSqlJob", "Status", "StatusHistory", "YarnApplications", "DriverOutputResourceUri", "DriverControlFilesUri", "Labels", "Scheduling", "TypeJob", }); + new java.lang.String[] { "Reference", "Placement", "HadoopJob", "SparkJob", "PysparkJob", "HiveJob", "PigJob", "SparkSqlJob", "Status", "StatusHistory", "YarnApplications", "DriverOutputResourceUri", "DriverControlFilesUri", "Labels", "Scheduling", "JobUuid", "TypeJob", }); internal_static_google_cloud_dataproc_v1_Job_LabelsEntry_descriptor = internal_static_google_cloud_dataproc_v1_Job_descriptor.getNestedTypes().get(0); internal_static_google_cloud_dataproc_v1_Job_LabelsEntry_fieldAccessorTable = new @@ -522,7 +523,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1_SubmitJobRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1_SubmitJobRequest_descriptor, - new java.lang.String[] { "ProjectId", "Region", "Job", }); + new java.lang.String[] { "ProjectId", "Region", "Job", "RequestId", }); internal_static_google_cloud_dataproc_v1_GetJobRequest_descriptor = getDescriptor().getMessageTypes().get(15); 
internal_static_google_cloud_dataproc_v1_GetJobRequest_fieldAccessorTable = new diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponse.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponse.java index ec9e2207385c..4cd69214d2e8 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponse.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponse.java @@ -103,7 +103,7 @@ private ListClustersResponse( private java.util.List clusters_; /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -113,7 +113,7 @@ public java.util.List getClustersList() { } /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -124,7 +124,7 @@ public java.util.List getClustersList() { } /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -134,7 +134,7 @@ public int getClustersCount() { } /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -144,7 +144,7 @@ public com.google.cloud.dataproc.v1.Cluster getClusters(int index) { } /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -158,7 +158,7 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder( private volatile java.lang.Object nextPageToken_; /** *
-   * Output-only. This token is included in the response if there are more
+   * Output only. This token is included in the response if there are more
    * results to fetch. To fetch additional results, provide this value as the
    * `page_token` in a subsequent `ListClustersRequest`.
    * 
@@ -179,7 +179,7 @@ public java.lang.String getNextPageToken() { } /** *
-   * Output-only. This token is included in the response if there are more
+   * Output only. This token is included in the response if there are more
    * results to fetch. To fetch additional results, provide this value as the
    * `page_token` in a subsequent `ListClustersRequest`.
    * 
@@ -580,7 +580,7 @@ private void ensureClustersIsMutable() { /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -594,7 +594,7 @@ public java.util.List getClustersList() { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -608,7 +608,7 @@ public int getClustersCount() { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -622,7 +622,7 @@ public com.google.cloud.dataproc.v1.Cluster getClusters(int index) { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -643,7 +643,7 @@ public Builder setClusters( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -661,7 +661,7 @@ public Builder setClusters( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -681,7 +681,7 @@ public Builder addClusters(com.google.cloud.dataproc.v1.Cluster value) { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -702,7 +702,7 @@ public Builder addClusters( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -720,7 +720,7 @@ public Builder addClusters( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -738,7 +738,7 @@ public Builder addClusters( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -757,7 +757,7 @@ public Builder addAllClusters( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -774,7 +774,7 @@ public Builder clearClusters() { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -791,7 +791,7 @@ public Builder removeClusters(int index) { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -802,7 +802,7 @@ public com.google.cloud.dataproc.v1.Cluster.Builder getClustersBuilder( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -816,7 +816,7 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -831,7 +831,7 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -842,7 +842,7 @@ public com.google.cloud.dataproc.v1.Cluster.Builder addClustersBuilder() { } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -854,7 +854,7 @@ public com.google.cloud.dataproc.v1.Cluster.Builder addClustersBuilder( } /** *
-     * Output-only. The clusters in the project.
+     * Output only. The clusters in the project.
      * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -881,7 +881,7 @@ public com.google.cloud.dataproc.v1.Cluster.Builder addClustersBuilder( private java.lang.Object nextPageToken_ = ""; /** *
-     * Output-only. This token is included in the response if there are more
+     * Output only. This token is included in the response if there are more
      * results to fetch. To fetch additional results, provide this value as the
      * `page_token` in a subsequent `ListClustersRequest`.
      * 
@@ -902,7 +902,7 @@ public java.lang.String getNextPageToken() { } /** *
-     * Output-only. This token is included in the response if there are more
+     * Output only. This token is included in the response if there are more
      * results to fetch. To fetch additional results, provide this value as the
      * `page_token` in a subsequent `ListClustersRequest`.
      * 
@@ -924,7 +924,7 @@ public java.lang.String getNextPageToken() { } /** *
-     * Output-only. This token is included in the response if there are more
+     * Output only. This token is included in the response if there are more
      * results to fetch. To fetch additional results, provide this value as the
      * `page_token` in a subsequent `ListClustersRequest`.
      * 
@@ -943,7 +943,7 @@ public Builder setNextPageToken( } /** *
-     * Output-only. This token is included in the response if there are more
+     * Output only. This token is included in the response if there are more
      * results to fetch. To fetch additional results, provide this value as the
      * `page_token` in a subsequent `ListClustersRequest`.
      * 
@@ -958,7 +958,7 @@ public Builder clearNextPageToken() { } /** *
-     * Output-only. This token is included in the response if there are more
+     * Output only. This token is included in the response if there are more
      * results to fetch. To fetch additional results, provide this value as the
      * `page_token` in a subsequent `ListClustersRequest`.
      * 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponseOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponseOrBuilder.java index 78c1fad70a64..8b3ae91fbb07 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponseOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponseOrBuilder.java @@ -9,7 +9,7 @@ public interface ListClustersResponseOrBuilder extends /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -18,7 +18,7 @@ public interface ListClustersResponseOrBuilder extends getClustersList(); /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -26,7 +26,7 @@ public interface ListClustersResponseOrBuilder extends com.google.cloud.dataproc.v1.Cluster getClusters(int index); /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -34,7 +34,7 @@ public interface ListClustersResponseOrBuilder extends int getClustersCount(); /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -43,7 +43,7 @@ public interface ListClustersResponseOrBuilder extends getClustersOrBuilderList(); /** *
-   * Output-only. The clusters in the project.
+   * Output only. The clusters in the project.
    * 
* * repeated .google.cloud.dataproc.v1.Cluster clusters = 1; @@ -53,7 +53,7 @@ com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder( /** *
-   * Output-only. This token is included in the response if there are more
+   * Output only. This token is included in the response if there are more
    * results to fetch. To fetch additional results, provide this value as the
    * `page_token` in a subsequent `ListClustersRequest`.
    * 
@@ -63,7 +63,7 @@ com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder( java.lang.String getNextPageToken(); /** *
-   * Output-only. This token is included in the response if there are more
+   * Output only. This token is included in the response if there are more
    * results to fetch. To fetch additional results, provide this value as the
    * `page_token` in a subsequent `ListClustersRequest`.
    * 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponse.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponse.java index 03b78e108a52..b46d18b76798 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponse.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponse.java @@ -103,7 +103,7 @@ private ListJobsResponse( private java.util.List jobs_; /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -113,7 +113,7 @@ public java.util.List getJobsList() { } /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -124,7 +124,7 @@ public java.util.List getJobsList() { } /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -134,7 +134,7 @@ public int getJobsCount() { } /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -144,7 +144,7 @@ public com.google.cloud.dataproc.v1.Job getJobs(int index) { } /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -580,7 +580,7 @@ private void ensureJobsIsMutable() { /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -594,7 +594,7 @@ public java.util.List getJobsList() { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -608,7 +608,7 @@ public int getJobsCount() { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -622,7 +622,7 @@ public com.google.cloud.dataproc.v1.Job getJobs(int index) { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -643,7 +643,7 @@ public Builder setJobs( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -661,7 +661,7 @@ public Builder setJobs( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -681,7 +681,7 @@ public Builder addJobs(com.google.cloud.dataproc.v1.Job value) { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -702,7 +702,7 @@ public Builder addJobs( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -720,7 +720,7 @@ public Builder addJobs( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -738,7 +738,7 @@ public Builder addJobs( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -757,7 +757,7 @@ public Builder addAllJobs( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -774,7 +774,7 @@ public Builder clearJobs() { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -791,7 +791,7 @@ public Builder removeJobs(int index) { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -802,7 +802,7 @@ public com.google.cloud.dataproc.v1.Job.Builder getJobsBuilder( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -816,7 +816,7 @@ public com.google.cloud.dataproc.v1.JobOrBuilder getJobsOrBuilder( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -831,7 +831,7 @@ public com.google.cloud.dataproc.v1.JobOrBuilder getJobsOrBuilder( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -842,7 +842,7 @@ public com.google.cloud.dataproc.v1.Job.Builder addJobsBuilder() { } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -854,7 +854,7 @@ public com.google.cloud.dataproc.v1.Job.Builder addJobsBuilder( } /** *
-     * Output-only. Jobs list.
+     * Output only. Jobs list.
      * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponseOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponseOrBuilder.java index cdc67fa0f4f6..5a6bd3edad39 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponseOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListJobsResponseOrBuilder.java @@ -9,7 +9,7 @@ public interface ListJobsResponseOrBuilder extends /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -18,7 +18,7 @@ public interface ListJobsResponseOrBuilder extends getJobsList(); /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -26,7 +26,7 @@ public interface ListJobsResponseOrBuilder extends com.google.cloud.dataproc.v1.Job getJobs(int index); /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -34,7 +34,7 @@ public interface ListJobsResponseOrBuilder extends int getJobsCount(); /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; @@ -43,7 +43,7 @@ public interface ListJobsResponseOrBuilder extends getJobsOrBuilderList(); /** *
-   * Output-only. Jobs list.
+   * Output only. Jobs list.
    * 
* * repeated .google.cloud.dataproc.v1.Job jobs = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequest.java new file mode 100644 index 000000000000..d024648a1c1e --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequest.java @@ -0,0 +1,827 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A request to list workflow templates in a project.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ListWorkflowTemplatesRequest} + */ +public final class ListWorkflowTemplatesRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) + ListWorkflowTemplatesRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use ListWorkflowTemplatesRequest.newBuilder() to construct. + private ListWorkflowTemplatesRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ListWorkflowTemplatesRequest() { + parent_ = ""; + pageSize_ = 0; + pageToken_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListWorkflowTemplatesRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + parent_ = s; + break; + } + case 16: { + + pageSize_ = input.readInt32(); + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + pageToken_ = s; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + 
e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.class, com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + private volatile java.lang.Object parent_; + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + public com.google.protobuf.ByteString + getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 2; + private int pageSize_; + /** + *
+   * Optional. The maximum number of results to return in each response.
+   * 
+ * + * int32 page_size = 2; + */ + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 3; + private volatile java.lang.Object pageToken_; + /** + *
+   * Optional. The page token, returned by a previous call, to request the
+   * next page of results.
+   * 
+ * + * string page_token = 3; + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + *
+   * Optional. The page token, returned by a previous call, to request the
+   * next page of results.
+   * 
+ * + * string page_token = 3; + */ + public com.google.protobuf.ByteString + getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getParentBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (pageSize_ != 0) { + output.writeInt32(2, pageSize_); + } + if (!getPageTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getParentBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, pageSize_); + } + if (!getPageTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest 
other = (com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) obj; + + boolean result = true; + result = result && getParent() + .equals(other.getParent()); + result = result && (getPageSize() + == other.getPageSize()); + result = result && getPageToken() + .equals(other.getPageToken()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to list workflow templates in a project.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ListWorkflowTemplatesRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.class, com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + parent_ = ""; + + pageSize_ = 0; + + pageToken_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest 
getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest build() { + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest buildPartial() { + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest result = new com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest(this); + result.parent_ = parent_; + result.pageSize_ = pageSize_; + result.pageToken_ = pageToken_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) { + return 
mergeFrom((com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest other) { + if (other == com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest.getDefaultInstance()) return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + onChanged(); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object parent_ = ""; + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public com.google.protobuf.ByteString + getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder setParent( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + parent_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder clearParent() { + + parent_ = getDefaultInstance().getParent(); + onChanged(); + return this; + } + /** + *
+     * Required. The "resource name" of the region, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}`
+     * 
+ * + * string parent = 1; + */ + public Builder setParentBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + parent_ = value; + onChanged(); + return this; + } + + private int pageSize_ ; + /** + *
+     * Optional. The maximum number of results to return in each response.
+     * 
+ * + * int32 page_size = 2; + */ + public int getPageSize() { + return pageSize_; + } + /** + *
+     * Optional. The maximum number of results to return in each response.
+     * 
+ * + * int32 page_size = 2; + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The maximum number of results to return in each response.
+     * 
+ * + * int32 page_size = 2; + */ + public Builder clearPageSize() { + + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + *
+     * Optional. The page token, returned by a previous call, to request the
+     * next page of results.
+     * 
+ * + * string page_token = 3; + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. The page token, returned by a previous call, to request the
+     * next page of results.
+     * 
+ * + * string page_token = 3; + */ + public com.google.protobuf.ByteString + getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. The page token, returned by a previous call, to request the
+     * next page of results.
+     * 
+ * + * string page_token = 3; + */ + public Builder setPageToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + pageToken_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The page token, returned by a previous call, to request the
+     * next page of results.
+     * 
+ * + * string page_token = 3; + */ + public Builder clearPageToken() { + + pageToken_ = getDefaultInstance().getPageToken(); + onChanged(); + return this; + } + /** + *
+     * Optional. The page token, returned by a previous call, to request the
+     * next page of results.
+     * 
+ * + * string page_token = 3; + */ + public Builder setPageTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + pageToken_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) + private static final com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest(); + } + + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListWorkflowTemplatesRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListWorkflowTemplatesRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequestOrBuilder.java new file mode 100644 index 000000000000..b0ddef0454a4 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesRequestOrBuilder.java @@ -0,0 +1,60 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ListWorkflowTemplatesRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ListWorkflowTemplatesRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + java.lang.String getParent(); + /** + *
+   * Required. The "resource name" of the region, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}`
+   * 
+ * + * string parent = 1; + */ + com.google.protobuf.ByteString + getParentBytes(); + + /** + *
+   * Optional. The maximum number of results to return in each response.
+   * 
+ * + * int32 page_size = 2; + */ + int getPageSize(); + + /** + *
+   * Optional. The page token, returned by a previous call, to request the
+   * next page of results.
+   * 
+ * + * string page_token = 3; + */ + java.lang.String getPageToken(); + /** + *
+   * Optional. The page token, returned by a previous call, to request the
+   * next page of results.
+   * 
+ * + * string page_token = 3; + */ + com.google.protobuf.ByteString + getPageTokenBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponse.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponse.java new file mode 100644 index 000000000000..2336213b8a98 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponse.java @@ -0,0 +1,1031 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A response to a request to list workflow templates in a project.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ListWorkflowTemplatesResponse} + */ +public final class ListWorkflowTemplatesResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) + ListWorkflowTemplatesResponseOrBuilder { +private static final long serialVersionUID = 0L; + // Use ListWorkflowTemplatesResponse.newBuilder() to construct. + private ListWorkflowTemplatesResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ListWorkflowTemplatesResponse() { + templates_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListWorkflowTemplatesResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + templates_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + templates_.add( + input.readMessage(com.google.cloud.dataproc.v1.WorkflowTemplate.parser(), extensionRegistry)); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + nextPageToken_ = s; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + templates_ = java.util.Collections.unmodifiableList(templates_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.class, com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.Builder.class); + } + + private int bitField0_; + public static final int TEMPLATES_FIELD_NUMBER = 1; + private java.util.List templates_; + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public java.util.List getTemplatesList() { + return templates_; + } + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public java.util.List + getTemplatesOrBuilderList() { + return templates_; + } + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public int getTemplatesCount() { + return templates_.size(); + } + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplates(int index) { + return templates_.get(index); + } + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplatesOrBuilder( + int index) { + return templates_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + private volatile java.lang.Object nextPageToken_; + /** + *
+   * Output only. This token is included in the response if there are more
+   * results to fetch. To fetch additional results, provide this value as the
+   * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+   * 
+ * + * string next_page_token = 2; + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + *
+   * Output only. This token is included in the response if there are more
+   * results to fetch. To fetch additional results, provide this value as the
+   * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+   * 
+ * + * string next_page_token = 2; + */ + public com.google.protobuf.ByteString + getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < templates_.size(); i++) { + output.writeMessage(1, templates_.get(i)); + } + if (!getNextPageTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < templates_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, templates_.get(i)); + } + if (!getNextPageTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse other = (com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) obj; + + boolean result = true; + result = result && 
getTemplatesList() + .equals(other.getTemplatesList()); + result = result && getNextPageToken() + .equals(other.getNextPageToken()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getTemplatesCount() > 0) { + hash = (37 * hash) + TEMPLATES_FIELD_NUMBER; + hash = (53 * hash) + getTemplatesList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); 
+ } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A response to a request to list workflow templates in a project.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ListWorkflowTemplatesResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.class, com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getTemplatesFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (templatesBuilder_ == null) { + templates_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + templatesBuilder_.clear(); + } + nextPageToken_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse build() { + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse buildPartial() { + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse result = new com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (templatesBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + templates_ = java.util.Collections.unmodifiableList(templates_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.templates_ = templates_; + } else { + result.templates_ = templatesBuilder_.build(); + } + result.nextPageToken_ = nextPageToken_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public 
Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) { + return mergeFrom((com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse other) { + if (other == com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.getDefaultInstance()) return this; + if (templatesBuilder_ == null) { + if (!other.templates_.isEmpty()) { + if (templates_.isEmpty()) { + templates_ = other.templates_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTemplatesIsMutable(); + templates_.addAll(other.templates_); + } + onChanged(); + } + } else { + if (!other.templates_.isEmpty()) { + if (templatesBuilder_.isEmpty()) { + templatesBuilder_.dispose(); + templatesBuilder_ = null; + templates_ = other.templates_; + bitField0_ = (bitField0_ & ~0x00000001); + templatesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getTemplatesFieldBuilder() : null; + } else { + templatesBuilder_.addAllMessages(other.templates_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List templates_ = + java.util.Collections.emptyList(); + private void ensureTemplatesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + templates_ = new java.util.ArrayList(templates_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> templatesBuilder_; + + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public java.util.List getTemplatesList() { + if (templatesBuilder_ == null) { + return java.util.Collections.unmodifiableList(templates_); + } else { + return templatesBuilder_.getMessageList(); + } + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public int getTemplatesCount() { + if (templatesBuilder_ == null) { + return templates_.size(); + } else { + return templatesBuilder_.getCount(); + } + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplates(int index) { + if (templatesBuilder_ == null) { + return templates_.get(index); + } else { + return templatesBuilder_.getMessage(index); + } + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder setTemplates( + int index, com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templatesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTemplatesIsMutable(); + templates_.set(index, value); + onChanged(); + } else { + templatesBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder setTemplates( + int index, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder builderForValue) { + if (templatesBuilder_ == null) { + ensureTemplatesIsMutable(); + templates_.set(index, builderForValue.build()); + onChanged(); + } else { + templatesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder addTemplates(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templatesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTemplatesIsMutable(); + templates_.add(value); + onChanged(); + } else { + templatesBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder addTemplates( + int index, com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templatesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTemplatesIsMutable(); + templates_.add(index, value); + onChanged(); + } else { + templatesBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder addTemplates( + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder builderForValue) { + if (templatesBuilder_ == null) { + ensureTemplatesIsMutable(); + templates_.add(builderForValue.build()); + onChanged(); + } else { + templatesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder addTemplates( + int index, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder builderForValue) { + if (templatesBuilder_ == null) { + ensureTemplatesIsMutable(); + templates_.add(index, builderForValue.build()); + onChanged(); + } else { + templatesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder addAllTemplates( + java.lang.Iterable values) { + if (templatesBuilder_ == null) { + ensureTemplatesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, templates_); + onChanged(); + } else { + templatesBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder clearTemplates() { + if (templatesBuilder_ == null) { + templates_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + templatesBuilder_.clear(); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public Builder removeTemplates(int index) { + if (templatesBuilder_ == null) { + ensureTemplatesIsMutable(); + templates_.remove(index); + onChanged(); + } else { + templatesBuilder_.remove(index); + } + return this; + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate.Builder getTemplatesBuilder( + int index) { + return getTemplatesFieldBuilder().getBuilder(index); + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplatesOrBuilder( + int index) { + if (templatesBuilder_ == null) { + return templates_.get(index); } else { + return templatesBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public java.util.List + getTemplatesOrBuilderList() { + if (templatesBuilder_ != null) { + return templatesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(templates_); + } + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate.Builder addTemplatesBuilder() { + return getTemplatesFieldBuilder().addBuilder( + com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance()); + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate.Builder addTemplatesBuilder( + int index) { + return getTemplatesFieldBuilder().addBuilder( + index, com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance()); + } + /** + *
+     * Output only. WorkflowTemplates list.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + public java.util.List + getTemplatesBuilderList() { + return getTemplatesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> + getTemplatesFieldBuilder() { + if (templatesBuilder_ == null) { + templatesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder>( + templates_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + templates_ = null; + } + return templatesBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + *
+     * Output only. This token is included in the response if there are more
+     * results to fetch. To fetch additional results, provide this value as the
+     * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+     * 
+ * + * string next_page_token = 2; + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. This token is included in the response if there are more
+     * results to fetch. To fetch additional results, provide this value as the
+     * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+     * 
+ * + * string next_page_token = 2; + */ + public com.google.protobuf.ByteString + getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. This token is included in the response if there are more
+     * results to fetch. To fetch additional results, provide this value as the
+     * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+     * 
+ * + * string next_page_token = 2; + */ + public Builder setNextPageToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + nextPageToken_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. This token is included in the response if there are more
+     * results to fetch. To fetch additional results, provide this value as the
+     * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+     * 
+ * + * string next_page_token = 2; + */ + public Builder clearNextPageToken() { + + nextPageToken_ = getDefaultInstance().getNextPageToken(); + onChanged(); + return this; + } + /** + *
+     * Output only. This token is included in the response if there are more
+     * results to fetch. To fetch additional results, provide this value as the
+     * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+     * 
+ * + * string next_page_token = 2; + */ + public Builder setNextPageTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + nextPageToken_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) + private static final com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse(); + } + + public static com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListWorkflowTemplatesResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListWorkflowTemplatesResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponseOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponseOrBuilder.java new file mode 100644 index 000000000000..438d0514f906 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListWorkflowTemplatesResponseOrBuilder.java @@ -0,0 +1,75 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ListWorkflowTemplatesResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ListWorkflowTemplatesResponse) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + java.util.List + getTemplatesList(); + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + com.google.cloud.dataproc.v1.WorkflowTemplate getTemplates(int index); + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + int getTemplatesCount(); + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + java.util.List + getTemplatesOrBuilderList(); + /** + *
+   * Output only. WorkflowTemplates list.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowTemplate templates = 1; + */ + com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplatesOrBuilder( + int index); + + /** + *
+   * Output only. This token is included in the response if there are more
+   * results to fetch. To fetch additional results, provide this value as the
+   * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+   * 
+ * + * string next_page_token = 2; + */ + java.lang.String getNextPageToken(); + /** + *
+   * Output only. This token is included in the response if there are more
+   * results to fetch. To fetch additional results, provide this value as the
+   * page_token in a subsequent <code>ListWorkflowTemplatesRequest</code>.
+   * 
+ * + * string next_page_token = 2; + */ + com.google.protobuf.ByteString + getNextPageTokenBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedCluster.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedCluster.java new file mode 100644 index 000000000000..95327b6c0f36 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedCluster.java @@ -0,0 +1,1222 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * Cluster that is managed by the workflow.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ManagedCluster} + */ +public final class ManagedCluster extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ManagedCluster) + ManagedClusterOrBuilder { +private static final long serialVersionUID = 0L; + // Use ManagedCluster.newBuilder() to construct. + private ManagedCluster(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ManagedCluster() { + clusterName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ManagedCluster( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + clusterName_ = s; + break; + } + case 26: { + com.google.cloud.dataproc.v1.ClusterConfig.Builder subBuilder = null; + if (config_ != null) { + subBuilder = config_.toBuilder(); + } + config_ = input.readMessage(com.google.cloud.dataproc.v1.ClusterConfig.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(config_); + config_ = subBuilder.buildPartial(); + } + + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + labels_ = com.google.protobuf.MapField.newMapField( + LabelsDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000004; + } + com.google.protobuf.MapEntry + labels__ = input.readMessage( + 
LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + labels_.getMutableMap().put( + labels__.getKey(), labels__.getValue()); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ManagedCluster_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 4: + return internalGetLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ManagedCluster_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ManagedCluster.class, com.google.cloud.dataproc.v1.ManagedCluster.Builder.class); + } + + private int bitField0_; + public static final int CLUSTER_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object clusterName_; + /** + *
+   * Required. The cluster name prefix. A unique cluster name will be formed by
+   * appending a random suffix.
+   * The name must contain only lower-case letters (a-z), numbers (0-9),
+   * and hyphens (-). Must begin with a letter. Cannot begin or end with
+   * hyphen. Must consist of between 2 and 35 characters.
+   * 
+ * + * string cluster_name = 2; + */ + public java.lang.String getClusterName() { + java.lang.Object ref = clusterName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterName_ = s; + return s; + } + } + /** + *
+   * Required. The cluster name prefix. A unique cluster name will be formed by
+   * appending a random suffix.
+   * The name must contain only lower-case letters (a-z), numbers (0-9),
+   * and hyphens (-). Must begin with a letter. Cannot begin or end with
+   * hyphen. Must consist of between 2 and 35 characters.
+   * 
+ * + * string cluster_name = 2; + */ + public com.google.protobuf.ByteString + getClusterNameBytes() { + java.lang.Object ref = clusterName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CONFIG_FIELD_NUMBER = 3; + private com.google.cloud.dataproc.v1.ClusterConfig config_; + /** + *
+   * Required. The cluster configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public boolean hasConfig() { + return config_ != null; + } + /** + *
+   * Required. The cluster configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public com.google.cloud.dataproc.v1.ClusterConfig getConfig() { + return config_ == null ? com.google.cloud.dataproc.v1.ClusterConfig.getDefaultInstance() : config_; + } + /** + *
+   * Required. The cluster configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public com.google.cloud.dataproc.v1.ClusterConfigOrBuilder getConfigOrBuilder() { + return getConfig(); + } + + public static final int LABELS_FIELD_NUMBER = 4; + private static final class LabelsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ManagedCluster_LabelsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> labels_; + private com.google.protobuf.MapField + internalGetLabels() { + if (labels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + return labels_; + } + + public int getLabelsCount() { + return internalGetLabels().getMap().size(); + } + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + + public boolean containsLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabels().getMap().containsKey(key); + } + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabels() { + return getLabelsMap(); + } + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + + public java.util.Map getLabelsMap() { + return internalGetLabels().getMap(); + } + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + + public java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + + public java.lang.String getLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getClusterNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, clusterName_); + } + if (config_ != null) { + output.writeMessage(3, getConfig()); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetLabels(), + LabelsDefaultEntryHolder.defaultEntry, + 4); + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getClusterNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, clusterName_); + } + if (config_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getConfig()); + } + for (java.util.Map.Entry entry + : internalGetLabels().getMap().entrySet()) { + com.google.protobuf.MapEntry + labels__ = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, labels__); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final 
java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ManagedCluster)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.ManagedCluster other = (com.google.cloud.dataproc.v1.ManagedCluster) obj; + + boolean result = true; + result = result && getClusterName() + .equals(other.getClusterName()); + result = result && (hasConfig() == other.hasConfig()); + if (hasConfig()) { + result = result && getConfig() + .equals(other.getConfig()); + } + result = result && internalGetLabels().equals( + other.internalGetLabels()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + CLUSTER_NAME_FIELD_NUMBER; + hash = (53 * hash) + getClusterName().hashCode(); + if (hasConfig()) { + hash = (37 * hash) + CONFIG_FIELD_NUMBER; + hash = (53 * hash) + getConfig().hashCode(); + } + if (!internalGetLabels().getMap().isEmpty()) { + hash = (37 * hash) + LABELS_FIELD_NUMBER; + hash = (53 * hash) + internalGetLabels().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException 
{ + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ManagedCluster parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.ManagedCluster prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Cluster that is managed by the workflow.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ManagedCluster} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ManagedCluster) + com.google.cloud.dataproc.v1.ManagedClusterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ManagedCluster_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 4: + return internalGetLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 4: + return internalGetMutableLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ManagedCluster_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ManagedCluster.class, com.google.cloud.dataproc.v1.ManagedCluster.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ManagedCluster.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + 
clusterName_ = ""; + + if (configBuilder_ == null) { + config_ = null; + } else { + config_ = null; + configBuilder_ = null; + } + internalGetMutableLabels().clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ManagedCluster_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ManagedCluster getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ManagedCluster build() { + com.google.cloud.dataproc.v1.ManagedCluster result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ManagedCluster buildPartial() { + com.google.cloud.dataproc.v1.ManagedCluster result = new com.google.cloud.dataproc.v1.ManagedCluster(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.clusterName_ = clusterName_; + if (configBuilder_ == null) { + result.config_ = config_; + } else { + result.config_ = configBuilder_.build(); + } + result.labels_ = internalGetLabels(); + result.labels_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return 
(Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ManagedCluster) { + return mergeFrom((com.google.cloud.dataproc.v1.ManagedCluster)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ManagedCluster other) { + if (other == com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance()) return this; + if (!other.getClusterName().isEmpty()) { + clusterName_ = other.clusterName_; + onChanged(); + } + if (other.hasConfig()) { + mergeConfig(other.getConfig()); + } + internalGetMutableLabels().mergeFrom( + other.internalGetLabels()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ManagedCluster parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ManagedCluster) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private 
java.lang.Object clusterName_ = ""; + /** + *
+     * Required. The cluster name prefix. A unique cluster name will be formed by
+     * appending a random suffix.
+     * The name must contain only lower-case letters (a-z), numbers (0-9),
+     * and hyphens (-). Must begin with a letter. Cannot begin or end with
+     * hyphen. Must consist of between 2 and 35 characters.
+     * 
+ * + * string cluster_name = 2; + */ + public java.lang.String getClusterName() { + java.lang.Object ref = clusterName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The cluster name prefix. A unique cluster name will be formed by
+     * appending a random suffix.
+     * The name must contain only lower-case letters (a-z), numbers (0-9),
+     * and hyphens (-). Must begin with a letter. Cannot begin or end with
+     * hyphen. Must consist of between 2 and 35 characters.
+     * 
+ * + * string cluster_name = 2; + */ + public com.google.protobuf.ByteString + getClusterNameBytes() { + java.lang.Object ref = clusterName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The cluster name prefix. A unique cluster name will be formed by
+     * appending a random suffix.
+     * The name must contain only lower-case letters (a-z), numbers (0-9),
+     * and hyphens (-). Must begin with a letter. Cannot begin or end with
+     * hyphen. Must consist of between 2 and 35 characters.
+     * 
+ * + * string cluster_name = 2; + */ + public Builder setClusterName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + clusterName_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The cluster name prefix. A unique cluster name will be formed by
+     * appending a random suffix.
+     * The name must contain only lower-case letters (a-z), numbers (0-9),
+     * and hyphens (-). Must begin with a letter. Cannot begin or end with
+     * hyphen. Must consist of between 2 and 35 characters.
+     * 
+ * + * string cluster_name = 2; + */ + public Builder clearClusterName() { + + clusterName_ = getDefaultInstance().getClusterName(); + onChanged(); + return this; + } + /** + *
+     * Required. The cluster name prefix. A unique cluster name will be formed by
+     * appending a random suffix.
+     * The name must contain only lower-case letters (a-z), numbers (0-9),
+     * and hyphens (-). Must begin with a letter. Cannot begin or end with
+     * hyphen. Must consist of between 2 and 35 characters.
+     * 
+ * + * string cluster_name = 2; + */ + public Builder setClusterNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + clusterName_ = value; + onChanged(); + return this; + } + + private com.google.cloud.dataproc.v1.ClusterConfig config_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterConfig, com.google.cloud.dataproc.v1.ClusterConfig.Builder, com.google.cloud.dataproc.v1.ClusterConfigOrBuilder> configBuilder_; + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public boolean hasConfig() { + return configBuilder_ != null || config_ != null; + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public com.google.cloud.dataproc.v1.ClusterConfig getConfig() { + if (configBuilder_ == null) { + return config_ == null ? com.google.cloud.dataproc.v1.ClusterConfig.getDefaultInstance() : config_; + } else { + return configBuilder_.getMessage(); + } + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public Builder setConfig(com.google.cloud.dataproc.v1.ClusterConfig value) { + if (configBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + config_ = value; + onChanged(); + } else { + configBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public Builder setConfig( + com.google.cloud.dataproc.v1.ClusterConfig.Builder builderForValue) { + if (configBuilder_ == null) { + config_ = builderForValue.build(); + onChanged(); + } else { + configBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public Builder mergeConfig(com.google.cloud.dataproc.v1.ClusterConfig value) { + if (configBuilder_ == null) { + if (config_ != null) { + config_ = + com.google.cloud.dataproc.v1.ClusterConfig.newBuilder(config_).mergeFrom(value).buildPartial(); + } else { + config_ = value; + } + onChanged(); + } else { + configBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public Builder clearConfig() { + if (configBuilder_ == null) { + config_ = null; + onChanged(); + } else { + config_ = null; + configBuilder_ = null; + } + + return this; + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public com.google.cloud.dataproc.v1.ClusterConfig.Builder getConfigBuilder() { + + onChanged(); + return getConfigFieldBuilder().getBuilder(); + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + public com.google.cloud.dataproc.v1.ClusterConfigOrBuilder getConfigOrBuilder() { + if (configBuilder_ != null) { + return configBuilder_.getMessageOrBuilder(); + } else { + return config_ == null ? + com.google.cloud.dataproc.v1.ClusterConfig.getDefaultInstance() : config_; + } + } + /** + *
+     * Required. The cluster configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterConfig, com.google.cloud.dataproc.v1.ClusterConfig.Builder, com.google.cloud.dataproc.v1.ClusterConfigOrBuilder> + getConfigFieldBuilder() { + if (configBuilder_ == null) { + configBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterConfig, com.google.cloud.dataproc.v1.ClusterConfig.Builder, com.google.cloud.dataproc.v1.ClusterConfigOrBuilder>( + getConfig(), + getParentForChildren(), + isClean()); + config_ = null; + } + return configBuilder_; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> labels_; + private com.google.protobuf.MapField + internalGetLabels() { + if (labels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + return labels_; + } + private com.google.protobuf.MapField + internalGetMutableLabels() { + onChanged();; + if (labels_ == null) { + labels_ = com.google.protobuf.MapField.newMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + if (!labels_.isMutable()) { + labels_ = labels_.copy(); + } + return labels_; + } + + public int getLabelsCount() { + return internalGetLabels().getMap().size(); + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + + public boolean containsLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabels().getMap().containsKey(key); + } + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabels() { + return getLabelsMap(); + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + + public java.util.Map getLabelsMap() { + return internalGetLabels().getMap(); + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + + public java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + + public java.lang.String getLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearLabels() { + internalGetMutableLabels().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + + public Builder removeLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableLabels().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableLabels() { + return internalGetMutableLabels().getMutableMap(); + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + public Builder putLabels( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableLabels().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Optional. The labels to associate with this cluster.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given cluster.
+     * 
+ * + * map<string, string> labels = 4; + */ + + public Builder putAllLabels( + java.util.Map values) { + internalGetMutableLabels().getMutableMap() + .putAll(values); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ManagedCluster) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ManagedCluster) + private static final com.google.cloud.dataproc.v1.ManagedCluster DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ManagedCluster(); + } + + public static com.google.cloud.dataproc.v1.ManagedCluster getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ManagedCluster parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ManagedCluster(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ManagedCluster getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedClusterOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedClusterOrBuilder.java 
new file mode 100644 index 000000000000..f1a3c250ce8c --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedClusterOrBuilder.java @@ -0,0 +1,144 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ManagedClusterOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ManagedCluster) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The cluster name prefix. A unique cluster name will be formed by
+   * appending a random suffix.
+   * The name must contain only lower-case letters (a-z), numbers (0-9),
+   * and hyphens (-). Must begin with a letter. Cannot begin or end with
+   * hyphen. Must consist of between 2 and 35 characters.
+   * 
+ * + * string cluster_name = 2; + */ + java.lang.String getClusterName(); + /** + *
+   * Required. The cluster name prefix. A unique cluster name will be formed by
+   * appending a random suffix.
+   * The name must contain only lower-case letters (a-z), numbers (0-9),
+   * and hyphens (-). Must begin with a letter. Cannot begin or end with
+   * hyphen. Must consist of between 2 and 35 characters.
+   * 
+ * + * string cluster_name = 2; + */ + com.google.protobuf.ByteString + getClusterNameBytes(); + + /** + *
+   * Required. The cluster configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + boolean hasConfig(); + /** + *
+   * Required. The cluster configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + com.google.cloud.dataproc.v1.ClusterConfig getConfig(); + /** + *
+   * Required. The cluster configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterConfig config = 3; + */ + com.google.cloud.dataproc.v1.ClusterConfigOrBuilder getConfigOrBuilder(); + + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + int getLabelsCount(); + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + boolean containsLabels( + java.lang.String key); + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getLabels(); + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + java.util.Map + getLabelsMap(); + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + + java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Optional. The labels to associate with this cluster.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given cluster.
+   * 
+ * + * map<string, string> labels = 4; + */ + + java.lang.String getLabelsOrThrow( + java.lang.String key); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfig.java index 592c6615bb85..11750f7772bc 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfig.java @@ -96,7 +96,7 @@ private ManagedGroupConfig( private volatile java.lang.Object instanceTemplateName_; /** *
-   * Output-only. The name of the Instance Template used for the Managed
+   * Output only. The name of the Instance Template used for the Managed
    * Instance Group.
    * 
* @@ -116,7 +116,7 @@ public java.lang.String getInstanceTemplateName() { } /** *
-   * Output-only. The name of the Instance Template used for the Managed
+   * Output only. The name of the Instance Template used for the Managed
    * Instance Group.
    * 
* @@ -140,7 +140,7 @@ public java.lang.String getInstanceTemplateName() { private volatile java.lang.Object instanceGroupManagerName_; /** *
-   * Output-only. The name of the Instance Group Manager for this group.
+   * Output only. The name of the Instance Group Manager for this group.
    * 
* * string instance_group_manager_name = 2; @@ -159,7 +159,7 @@ public java.lang.String getInstanceGroupManagerName() { } /** *
-   * Output-only. The name of the Instance Group Manager for this group.
+   * Output only. The name of the Instance Group Manager for this group.
    * 
* * string instance_group_manager_name = 2; @@ -505,7 +505,7 @@ public Builder mergeFrom( private java.lang.Object instanceTemplateName_ = ""; /** *
-     * Output-only. The name of the Instance Template used for the Managed
+     * Output only. The name of the Instance Template used for the Managed
      * Instance Group.
      * 
* @@ -525,7 +525,7 @@ public java.lang.String getInstanceTemplateName() { } /** *
-     * Output-only. The name of the Instance Template used for the Managed
+     * Output only. The name of the Instance Template used for the Managed
      * Instance Group.
      * 
* @@ -546,7 +546,7 @@ public java.lang.String getInstanceTemplateName() { } /** *
-     * Output-only. The name of the Instance Template used for the Managed
+     * Output only. The name of the Instance Template used for the Managed
      * Instance Group.
      * 
* @@ -564,7 +564,7 @@ public Builder setInstanceTemplateName( } /** *
-     * Output-only. The name of the Instance Template used for the Managed
+     * Output only. The name of the Instance Template used for the Managed
      * Instance Group.
      * 
* @@ -578,7 +578,7 @@ public Builder clearInstanceTemplateName() { } /** *
-     * Output-only. The name of the Instance Template used for the Managed
+     * Output only. The name of the Instance Template used for the Managed
      * Instance Group.
      * 
* @@ -599,7 +599,7 @@ public Builder setInstanceTemplateNameBytes( private java.lang.Object instanceGroupManagerName_ = ""; /** *
-     * Output-only. The name of the Instance Group Manager for this group.
+     * Output only. The name of the Instance Group Manager for this group.
      * 
* * string instance_group_manager_name = 2; @@ -618,7 +618,7 @@ public java.lang.String getInstanceGroupManagerName() { } /** *
-     * Output-only. The name of the Instance Group Manager for this group.
+     * Output only. The name of the Instance Group Manager for this group.
      * 
* * string instance_group_manager_name = 2; @@ -638,7 +638,7 @@ public java.lang.String getInstanceGroupManagerName() { } /** *
-     * Output-only. The name of the Instance Group Manager for this group.
+     * Output only. The name of the Instance Group Manager for this group.
      * 
* * string instance_group_manager_name = 2; @@ -655,7 +655,7 @@ public Builder setInstanceGroupManagerName( } /** *
-     * Output-only. The name of the Instance Group Manager for this group.
+     * Output only. The name of the Instance Group Manager for this group.
      * 
* * string instance_group_manager_name = 2; @@ -668,7 +668,7 @@ public Builder clearInstanceGroupManagerName() { } /** *
-     * Output-only. The name of the Instance Group Manager for this group.
+     * Output only. The name of the Instance Group Manager for this group.
      * 
* * string instance_group_manager_name = 2; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfigOrBuilder.java index fd245d893d14..a43e01a8d83b 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ManagedGroupConfigOrBuilder.java @@ -9,7 +9,7 @@ public interface ManagedGroupConfigOrBuilder extends /** *
-   * Output-only. The name of the Instance Template used for the Managed
+   * Output only. The name of the Instance Template used for the Managed
    * Instance Group.
    * 
* @@ -18,7 +18,7 @@ public interface ManagedGroupConfigOrBuilder extends java.lang.String getInstanceTemplateName(); /** *
-   * Output-only. The name of the Instance Template used for the Managed
+   * Output only. The name of the Instance Template used for the Managed
    * Instance Group.
    * 
* @@ -29,7 +29,7 @@ public interface ManagedGroupConfigOrBuilder extends /** *
-   * Output-only. The name of the Instance Group Manager for this group.
+   * Output only. The name of the Instance Group Manager for this group.
    * 
* * string instance_group_manager_name = 2; @@ -37,7 +37,7 @@ public interface ManagedGroupConfigOrBuilder extends java.lang.String getInstanceGroupManagerName(); /** *
-   * Output-only. The name of the Instance Group Manager for this group.
+   * Output only. The name of the Instance Group Manager for this group.
    * 
* * string instance_group_manager_name = 2; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationAction.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationAction.java index 5fbdba4310ba..aae0370eac62 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationAction.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationAction.java @@ -103,7 +103,7 @@ private NodeInitializationAction( private volatile java.lang.Object executableFile_; /** *
-   * Required. Google Cloud Storage URI of executable file.
+   * Required. Cloud Storage URI of executable file.
    * 
* * string executable_file = 1; @@ -122,7 +122,7 @@ public java.lang.String getExecutableFile() { } /** *
-   * Required. Google Cloud Storage URI of executable file.
+   * Required. Cloud Storage URI of executable file.
    * 
* * string executable_file = 1; @@ -524,7 +524,7 @@ public Builder mergeFrom( private java.lang.Object executableFile_ = ""; /** *
-     * Required. Google Cloud Storage URI of executable file.
+     * Required. Cloud Storage URI of executable file.
      * 
* * string executable_file = 1; @@ -543,7 +543,7 @@ public java.lang.String getExecutableFile() { } /** *
-     * Required. Google Cloud Storage URI of executable file.
+     * Required. Cloud Storage URI of executable file.
      * 
* * string executable_file = 1; @@ -563,7 +563,7 @@ public java.lang.String getExecutableFile() { } /** *
-     * Required. Google Cloud Storage URI of executable file.
+     * Required. Cloud Storage URI of executable file.
      * 
* * string executable_file = 1; @@ -580,7 +580,7 @@ public Builder setExecutableFile( } /** *
-     * Required. Google Cloud Storage URI of executable file.
+     * Required. Cloud Storage URI of executable file.
      * 
* * string executable_file = 1; @@ -593,7 +593,7 @@ public Builder clearExecutableFile() { } /** *
-     * Required. Google Cloud Storage URI of executable file.
+     * Required. Cloud Storage URI of executable file.
      * 
* * string executable_file = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationActionOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationActionOrBuilder.java index 5fd55e83c377..f72cdfd1b098 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationActionOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/NodeInitializationActionOrBuilder.java @@ -9,7 +9,7 @@ public interface NodeInitializationActionOrBuilder extends /** *
-   * Required. Google Cloud Storage URI of executable file.
+   * Required. Cloud Storage URI of executable file.
    * 
* * string executable_file = 1; @@ -17,7 +17,7 @@ public interface NodeInitializationActionOrBuilder extends java.lang.String getExecutableFile(); /** *
-   * Required. Google Cloud Storage URI of executable file.
+   * Required. Cloud Storage URI of executable file.
    * 
* * string executable_file = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OperationsProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OperationsProto.java index 1b5700bc2f71..94b04623b056 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OperationsProto.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OperationsProto.java @@ -40,29 +40,27 @@ public static void registerAllExtensions( java.lang.String[] descriptorData = { "\n)google/cloud/dataproc/v1/operations.pr" + "oto\022\030google.cloud.dataproc.v1\032\034google/ap" + - "i/annotations.proto\032#google/longrunning/" + - "operations.proto\032\033google/protobuf/empty." + - "proto\032\037google/protobuf/timestamp.proto\"\365" + - "\001\n\026ClusterOperationStatus\022E\n\005state\030\001 \001(\016" + - "26.google.cloud.dataproc.v1.ClusterOpera" + - "tionStatus.State\022\023\n\013inner_state\030\002 \001(\t\022\017\n" + - "\007details\030\003 \001(\t\0224\n\020state_start_time\030\004 \001(\013" + - "2\032.google.protobuf.Timestamp\"8\n\005State\022\013\n" + - "\007UNKNOWN\020\000\022\013\n\007PENDING\020\001\022\013\n\007RUNNING\020\002\022\010\n\004" + - "DONE\020\003\"\220\003\n\030ClusterOperationMetadata\022\024\n\014c" + - "luster_name\030\007 \001(\t\022\024\n\014cluster_uuid\030\010 \001(\t\022" + - "@\n\006status\030\t \001(\01320.google.cloud.dataproc." 
+ - "v1.ClusterOperationStatus\022H\n\016status_hist" + - "ory\030\n \003(\01320.google.cloud.dataproc.v1.Clu" + - "sterOperationStatus\022\026\n\016operation_type\030\013 " + - "\001(\t\022\023\n\013description\030\014 \001(\t\022N\n\006labels\030\r \003(\013" + - "2>.google.cloud.dataproc.v1.ClusterOpera" + - "tionMetadata.LabelsEntry\022\020\n\010warnings\030\016 \003" + - "(\t\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030" + - "\002 \001(\t:\0028\001Bs\n\034com.google.cloud.dataproc.v" + - "1B\017OperationsProtoP\001Z@google.golang.org/" + - "genproto/googleapis/cloud/dataproc/v1;da" + - "taprocb\006proto3" + "i/annotations.proto\032\037google/protobuf/tim" + + "estamp.proto\"\365\001\n\026ClusterOperationStatus\022" + + "E\n\005state\030\001 \001(\01626.google.cloud.dataproc.v" + + "1.ClusterOperationStatus.State\022\023\n\013inner_" + + "state\030\002 \001(\t\022\017\n\007details\030\003 \001(\t\0224\n\020state_st" + + "art_time\030\004 \001(\0132\032.google.protobuf.Timesta" + + "mp\"8\n\005State\022\013\n\007UNKNOWN\020\000\022\013\n\007PENDING\020\001\022\013\n" + + "\007RUNNING\020\002\022\010\n\004DONE\020\003\"\220\003\n\030ClusterOperatio" + + "nMetadata\022\024\n\014cluster_name\030\007 \001(\t\022\024\n\014clust" + + "er_uuid\030\010 \001(\t\022@\n\006status\030\t \001(\01320.google.c" + + "loud.dataproc.v1.ClusterOperationStatus\022" + + "H\n\016status_history\030\n \003(\01320.google.cloud.d" + + "ataproc.v1.ClusterOperationStatus\022\026\n\016ope" + + "ration_type\030\013 \001(\t\022\023\n\013description\030\014 \001(\t\022N" + + "\n\006labels\030\r \003(\0132>.google.cloud.dataproc.v" + + "1.ClusterOperationMetadata.LabelsEntry\022\020" + + "\n\010warnings\030\016 \003(\t\032-\n\013LabelsEntry\022\013\n\003key\030\001" + + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001Bs\n\034com.google.cl" + + "oud.dataproc.v1B\017OperationsProtoP\001Z@goog" + + "le.golang.org/genproto/googleapis/cloud/" + + 
"dataproc/v1;dataprocb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -76,8 +74,6 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), - com.google.longrunning.OperationsProto.getDescriptor(), - com.google.protobuf.EmptyProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), }, assigner); internal_static_google_cloud_dataproc_v1_ClusterOperationStatus_descriptor = @@ -99,8 +95,6 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1_ClusterOperationMetadata_LabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); com.google.api.AnnotationsProto.getDescriptor(); - com.google.longrunning.OperationsProto.getDescriptor(); - com.google.protobuf.EmptyProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJob.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJob.java new file mode 100644 index 000000000000..7b4e25b703d0 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJob.java @@ -0,0 +1,3065 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A job executed by the workflow.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.OrderedJob} + */ +public final class OrderedJob extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.OrderedJob) + OrderedJobOrBuilder { +private static final long serialVersionUID = 0L; + // Use OrderedJob.newBuilder() to construct. + private OrderedJob(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OrderedJob() { + stepId_ = ""; + prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private OrderedJob( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + stepId_ = s; + break; + } + case 18: { + com.google.cloud.dataproc.v1.HadoopJob.Builder subBuilder = null; + if (jobTypeCase_ == 2) { + subBuilder = ((com.google.cloud.dataproc.v1.HadoopJob) jobType_).toBuilder(); + } + jobType_ = + input.readMessage(com.google.cloud.dataproc.v1.HadoopJob.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.HadoopJob) jobType_); + jobType_ = subBuilder.buildPartial(); + } + jobTypeCase_ = 2; + break; + } + case 26: { + com.google.cloud.dataproc.v1.SparkJob.Builder subBuilder = null; + if (jobTypeCase_ == 3) { + subBuilder = 
((com.google.cloud.dataproc.v1.SparkJob) jobType_).toBuilder(); + } + jobType_ = + input.readMessage(com.google.cloud.dataproc.v1.SparkJob.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.SparkJob) jobType_); + jobType_ = subBuilder.buildPartial(); + } + jobTypeCase_ = 3; + break; + } + case 34: { + com.google.cloud.dataproc.v1.PySparkJob.Builder subBuilder = null; + if (jobTypeCase_ == 4) { + subBuilder = ((com.google.cloud.dataproc.v1.PySparkJob) jobType_).toBuilder(); + } + jobType_ = + input.readMessage(com.google.cloud.dataproc.v1.PySparkJob.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.PySparkJob) jobType_); + jobType_ = subBuilder.buildPartial(); + } + jobTypeCase_ = 4; + break; + } + case 42: { + com.google.cloud.dataproc.v1.HiveJob.Builder subBuilder = null; + if (jobTypeCase_ == 5) { + subBuilder = ((com.google.cloud.dataproc.v1.HiveJob) jobType_).toBuilder(); + } + jobType_ = + input.readMessage(com.google.cloud.dataproc.v1.HiveJob.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.HiveJob) jobType_); + jobType_ = subBuilder.buildPartial(); + } + jobTypeCase_ = 5; + break; + } + case 50: { + com.google.cloud.dataproc.v1.PigJob.Builder subBuilder = null; + if (jobTypeCase_ == 6) { + subBuilder = ((com.google.cloud.dataproc.v1.PigJob) jobType_).toBuilder(); + } + jobType_ = + input.readMessage(com.google.cloud.dataproc.v1.PigJob.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.PigJob) jobType_); + jobType_ = subBuilder.buildPartial(); + } + jobTypeCase_ = 6; + break; + } + case 58: { + com.google.cloud.dataproc.v1.SparkSqlJob.Builder subBuilder = null; + if (jobTypeCase_ == 7) { + subBuilder = ((com.google.cloud.dataproc.v1.SparkSqlJob) jobType_).toBuilder(); + } + jobType_ = + 
input.readMessage(com.google.cloud.dataproc.v1.SparkSqlJob.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.SparkSqlJob) jobType_); + jobType_ = subBuilder.buildPartial(); + } + jobTypeCase_ = 7; + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + labels_ = com.google.protobuf.MapField.newMapField( + LabelsDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000080; + } + com.google.protobuf.MapEntry + labels__ = input.readMessage( + LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + labels_.getMutableMap().put( + labels__.getKey(), labels__.getValue()); + break; + } + case 74: { + com.google.cloud.dataproc.v1.JobScheduling.Builder subBuilder = null; + if (scheduling_ != null) { + subBuilder = scheduling_.toBuilder(); + } + scheduling_ = input.readMessage(com.google.cloud.dataproc.v1.JobScheduling.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scheduling_); + scheduling_ = subBuilder.buildPartial(); + } + + break; + } + case 82: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { + prerequisiteStepIds_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000200; + } + prerequisiteStepIds_.add(s); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) { + prerequisiteStepIds_ = prerequisiteStepIds_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public 
static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 8: + return internalGetLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.OrderedJob.class, com.google.cloud.dataproc.v1.OrderedJob.Builder.class); + } + + private int bitField0_; + private int jobTypeCase_ = 0; + private java.lang.Object jobType_; + public enum JobTypeCase + implements com.google.protobuf.Internal.EnumLite { + HADOOP_JOB(2), + SPARK_JOB(3), + PYSPARK_JOB(4), + HIVE_JOB(5), + PIG_JOB(6), + SPARK_SQL_JOB(7), + JOBTYPE_NOT_SET(0); + private final int value; + private JobTypeCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static JobTypeCase valueOf(int value) { + return forNumber(value); + } + + public static JobTypeCase forNumber(int value) { + switch (value) { + case 2: return HADOOP_JOB; + case 3: return SPARK_JOB; + case 4: return PYSPARK_JOB; + case 5: return HIVE_JOB; + case 6: return PIG_JOB; + case 7: return SPARK_SQL_JOB; + case 0: return JOBTYPE_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public JobTypeCase + getJobTypeCase() { + return JobTypeCase.forNumber( + jobTypeCase_); + } + + public static final int STEP_ID_FIELD_NUMBER = 1; + private volatile java.lang.Object stepId_; + /** + *
+   * Required. The step id. The id must be unique among all jobs
+   * within the template.
+   * The step id is used as prefix for job id, as job
+   * `goog-dataproc-workflow-step-id` label, and in
+   * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+   * steps.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string step_id = 1; + */ + public java.lang.String getStepId() { + java.lang.Object ref = stepId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + stepId_ = s; + return s; + } + } + /** + *
+   * Required. The step id. The id must be unique among all jobs
+   * within the template.
+   * The step id is used as prefix for job id, as job
+   * `goog-dataproc-workflow-step-id` label, and in
+   * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+   * steps.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string step_id = 1; + */ + public com.google.protobuf.ByteString + getStepIdBytes() { + java.lang.Object ref = stepId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stepId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int HADOOP_JOB_FIELD_NUMBER = 2; + /** + *
+   * Job is a Hadoop job.
+   * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public boolean hasHadoopJob() { + return jobTypeCase_ == 2; + } + /** + *
+   * Job is a Hadoop job.
+   * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() { + if (jobTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.HadoopJob) jobType_; + } + return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance(); + } + /** + *
+   * Job is a Hadoop job.
+   * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() { + if (jobTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.HadoopJob) jobType_; + } + return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance(); + } + + public static final int SPARK_JOB_FIELD_NUMBER = 3; + /** + *
+   * Job is a Spark job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public boolean hasSparkJob() { + return jobTypeCase_ == 3; + } + /** + *
+   * Job is a Spark job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public com.google.cloud.dataproc.v1.SparkJob getSparkJob() { + if (jobTypeCase_ == 3) { + return (com.google.cloud.dataproc.v1.SparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance(); + } + /** + *
+   * Job is a Spark job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() { + if (jobTypeCase_ == 3) { + return (com.google.cloud.dataproc.v1.SparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance(); + } + + public static final int PYSPARK_JOB_FIELD_NUMBER = 4; + /** + *
+   * Job is a Pyspark job.
+   * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public boolean hasPysparkJob() { + return jobTypeCase_ == 4; + } + /** + *
+   * Job is a Pyspark job.
+   * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() { + if (jobTypeCase_ == 4) { + return (com.google.cloud.dataproc.v1.PySparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance(); + } + /** + *
+   * Job is a Pyspark job.
+   * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() { + if (jobTypeCase_ == 4) { + return (com.google.cloud.dataproc.v1.PySparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance(); + } + + public static final int HIVE_JOB_FIELD_NUMBER = 5; + /** + *
+   * Job is a Hive job.
+   * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public boolean hasHiveJob() { + return jobTypeCase_ == 5; + } + /** + *
+   * Job is a Hive job.
+   * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public com.google.cloud.dataproc.v1.HiveJob getHiveJob() { + if (jobTypeCase_ == 5) { + return (com.google.cloud.dataproc.v1.HiveJob) jobType_; + } + return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance(); + } + /** + *
+   * Job is a Hive job.
+   * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() { + if (jobTypeCase_ == 5) { + return (com.google.cloud.dataproc.v1.HiveJob) jobType_; + } + return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance(); + } + + public static final int PIG_JOB_FIELD_NUMBER = 6; + /** + *
+   * Job is a Pig job.
+   * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public boolean hasPigJob() { + return jobTypeCase_ == 6; + } + /** + *
+   * Job is a Pig job.
+   * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public com.google.cloud.dataproc.v1.PigJob getPigJob() { + if (jobTypeCase_ == 6) { + return (com.google.cloud.dataproc.v1.PigJob) jobType_; + } + return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance(); + } + /** + *
+   * Job is a Pig job.
+   * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() { + if (jobTypeCase_ == 6) { + return (com.google.cloud.dataproc.v1.PigJob) jobType_; + } + return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance(); + } + + public static final int SPARK_SQL_JOB_FIELD_NUMBER = 7; + /** + *
+   * Job is a SparkSql job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public boolean hasSparkSqlJob() { + return jobTypeCase_ == 7; + } + /** + *
+   * Job is a SparkSql job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() { + if (jobTypeCase_ == 7) { + return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance(); + } + /** + *
+   * Job is a SparkSql job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() { + if (jobTypeCase_ == 7) { + return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance(); + } + + public static final int LABELS_FIELD_NUMBER = 8; + private static final class LabelsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_OrderedJob_LabelsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> labels_; + private com.google.protobuf.MapField + internalGetLabels() { + if (labels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + return labels_; + } + + public int getLabelsCount() { + return internalGetLabels().getMap().size(); + } + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + + public boolean containsLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabels().getMap().containsKey(key); + } + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabels() { + return getLabelsMap(); + } + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + + public java.util.Map getLabelsMap() { + return internalGetLabels().getMap(); + } + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + + public java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + + public java.lang.String getLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int SCHEDULING_FIELD_NUMBER = 9; + private com.google.cloud.dataproc.v1.JobScheduling scheduling_; + /** + *
+   * Optional. Job scheduling configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public boolean hasScheduling() { + return scheduling_ != null; + } + /** + *
+   * Optional. Job scheduling configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public com.google.cloud.dataproc.v1.JobScheduling getScheduling() { + return scheduling_ == null ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance() : scheduling_; + } + /** + *
+   * Optional. Job scheduling configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() { + return getScheduling(); + } + + public static final int PREREQUISITE_STEP_IDS_FIELD_NUMBER = 10; + private com.google.protobuf.LazyStringList prerequisiteStepIds_; + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public com.google.protobuf.ProtocolStringList + getPrerequisiteStepIdsList() { + return prerequisiteStepIds_; + } + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public int getPrerequisiteStepIdsCount() { + return prerequisiteStepIds_.size(); + } + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public java.lang.String getPrerequisiteStepIds(int index) { + return prerequisiteStepIds_.get(index); + } + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public com.google.protobuf.ByteString + getPrerequisiteStepIdsBytes(int index) { + return prerequisiteStepIds_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getStepIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, stepId_); + } + if (jobTypeCase_ == 2) { + output.writeMessage(2, (com.google.cloud.dataproc.v1.HadoopJob) jobType_); + } + if (jobTypeCase_ == 3) { + output.writeMessage(3, (com.google.cloud.dataproc.v1.SparkJob) jobType_); + } + if (jobTypeCase_ == 4) { + output.writeMessage(4, (com.google.cloud.dataproc.v1.PySparkJob) jobType_); + } + if (jobTypeCase_ == 5) { + output.writeMessage(5, (com.google.cloud.dataproc.v1.HiveJob) jobType_); + } + if (jobTypeCase_ == 6) { + output.writeMessage(6, (com.google.cloud.dataproc.v1.PigJob) jobType_); + } + if (jobTypeCase_ == 7) { + output.writeMessage(7, (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetLabels(), + LabelsDefaultEntryHolder.defaultEntry, + 8); + if (scheduling_ != null) { + output.writeMessage(9, getScheduling()); + } + for (int i = 0; i < prerequisiteStepIds_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 10, prerequisiteStepIds_.getRaw(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getStepIdBytes().isEmpty()) { + size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, stepId_); + } + if (jobTypeCase_ == 2) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, (com.google.cloud.dataproc.v1.HadoopJob) jobType_); + } + if (jobTypeCase_ == 3) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, (com.google.cloud.dataproc.v1.SparkJob) jobType_); + } + if (jobTypeCase_ == 4) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, (com.google.cloud.dataproc.v1.PySparkJob) jobType_); + } + if (jobTypeCase_ == 5) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, (com.google.cloud.dataproc.v1.HiveJob) jobType_); + } + if (jobTypeCase_ == 6) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, (com.google.cloud.dataproc.v1.PigJob) jobType_); + } + if (jobTypeCase_ == 7) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_); + } + for (java.util.Map.Entry entry + : internalGetLabels().getMap().entrySet()) { + com.google.protobuf.MapEntry + labels__ = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, labels__); + } + if (scheduling_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(9, getScheduling()); + } + { + int dataSize = 0; + for (int i = 0; i < prerequisiteStepIds_.size(); i++) { + dataSize += computeStringSizeNoTag(prerequisiteStepIds_.getRaw(i)); + } + size += dataSize; + size += 1 * getPrerequisiteStepIdsList().size(); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.OrderedJob)) { + return super.equals(obj); 
+ } + com.google.cloud.dataproc.v1.OrderedJob other = (com.google.cloud.dataproc.v1.OrderedJob) obj; + + boolean result = true; + result = result && getStepId() + .equals(other.getStepId()); + result = result && internalGetLabels().equals( + other.internalGetLabels()); + result = result && (hasScheduling() == other.hasScheduling()); + if (hasScheduling()) { + result = result && getScheduling() + .equals(other.getScheduling()); + } + result = result && getPrerequisiteStepIdsList() + .equals(other.getPrerequisiteStepIdsList()); + result = result && getJobTypeCase().equals( + other.getJobTypeCase()); + if (!result) return false; + switch (jobTypeCase_) { + case 2: + result = result && getHadoopJob() + .equals(other.getHadoopJob()); + break; + case 3: + result = result && getSparkJob() + .equals(other.getSparkJob()); + break; + case 4: + result = result && getPysparkJob() + .equals(other.getPysparkJob()); + break; + case 5: + result = result && getHiveJob() + .equals(other.getHiveJob()); + break; + case 6: + result = result && getPigJob() + .equals(other.getPigJob()); + break; + case 7: + result = result && getSparkSqlJob() + .equals(other.getSparkSqlJob()); + break; + case 0: + default: + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + getStepId().hashCode(); + if (!internalGetLabels().getMap().isEmpty()) { + hash = (37 * hash) + LABELS_FIELD_NUMBER; + hash = (53 * hash) + internalGetLabels().hashCode(); + } + if (hasScheduling()) { + hash = (37 * hash) + SCHEDULING_FIELD_NUMBER; + hash = (53 * hash) + getScheduling().hashCode(); + } + if (getPrerequisiteStepIdsCount() > 0) { + hash = (37 * hash) + PREREQUISITE_STEP_IDS_FIELD_NUMBER; + hash = (53 * hash) + 
getPrerequisiteStepIdsList().hashCode(); + } + switch (jobTypeCase_) { + case 2: + hash = (37 * hash) + HADOOP_JOB_FIELD_NUMBER; + hash = (53 * hash) + getHadoopJob().hashCode(); + break; + case 3: + hash = (37 * hash) + SPARK_JOB_FIELD_NUMBER; + hash = (53 * hash) + getSparkJob().hashCode(); + break; + case 4: + hash = (37 * hash) + PYSPARK_JOB_FIELD_NUMBER; + hash = (53 * hash) + getPysparkJob().hashCode(); + break; + case 5: + hash = (37 * hash) + HIVE_JOB_FIELD_NUMBER; + hash = (53 * hash) + getHiveJob().hashCode(); + break; + case 6: + hash = (37 * hash) + PIG_JOB_FIELD_NUMBER; + hash = (53 * hash) + getPigJob().hashCode(); + break; + case 7: + hash = (37 * hash) + SPARK_SQL_JOB_FIELD_NUMBER; + hash = (53 * hash) + getSparkSqlJob().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.OrderedJob parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.OrderedJob prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A job executed by the workflow.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.OrderedJob} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.OrderedJob) + com.google.cloud.dataproc.v1.OrderedJobOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 8: + return internalGetLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 8: + return internalGetMutableLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.OrderedJob.class, com.google.cloud.dataproc.v1.OrderedJob.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.OrderedJob.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + stepId_ = ""; + + 
internalGetMutableLabels().clear(); + if (schedulingBuilder_ == null) { + scheduling_ = null; + } else { + scheduling_ = null; + schedulingBuilder_ = null; + } + prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000200); + jobTypeCase_ = 0; + jobType_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.OrderedJob getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.OrderedJob build() { + com.google.cloud.dataproc.v1.OrderedJob result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.OrderedJob buildPartial() { + com.google.cloud.dataproc.v1.OrderedJob result = new com.google.cloud.dataproc.v1.OrderedJob(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.stepId_ = stepId_; + if (jobTypeCase_ == 2) { + if (hadoopJobBuilder_ == null) { + result.jobType_ = jobType_; + } else { + result.jobType_ = hadoopJobBuilder_.build(); + } + } + if (jobTypeCase_ == 3) { + if (sparkJobBuilder_ == null) { + result.jobType_ = jobType_; + } else { + result.jobType_ = sparkJobBuilder_.build(); + } + } + if (jobTypeCase_ == 4) { + if (pysparkJobBuilder_ == null) { + result.jobType_ = jobType_; + } else { + result.jobType_ = pysparkJobBuilder_.build(); + } + } + if (jobTypeCase_ == 5) { + if (hiveJobBuilder_ == null) { + result.jobType_ = jobType_; + } else { + result.jobType_ = hiveJobBuilder_.build(); + } + } + if (jobTypeCase_ == 6) { + if (pigJobBuilder_ == null) { + result.jobType_ = jobType_; 
+ } else { + result.jobType_ = pigJobBuilder_.build(); + } + } + if (jobTypeCase_ == 7) { + if (sparkSqlJobBuilder_ == null) { + result.jobType_ = jobType_; + } else { + result.jobType_ = sparkSqlJobBuilder_.build(); + } + } + result.labels_ = internalGetLabels(); + result.labels_.makeImmutable(); + if (schedulingBuilder_ == null) { + result.scheduling_ = scheduling_; + } else { + result.scheduling_ = schedulingBuilder_.build(); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + prerequisiteStepIds_ = prerequisiteStepIds_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000200); + } + result.prerequisiteStepIds_ = prerequisiteStepIds_; + result.bitField0_ = to_bitField0_; + result.jobTypeCase_ = jobTypeCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.OrderedJob) { + return 
mergeFrom((com.google.cloud.dataproc.v1.OrderedJob)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.OrderedJob other) { + if (other == com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance()) return this; + if (!other.getStepId().isEmpty()) { + stepId_ = other.stepId_; + onChanged(); + } + internalGetMutableLabels().mergeFrom( + other.internalGetLabels()); + if (other.hasScheduling()) { + mergeScheduling(other.getScheduling()); + } + if (!other.prerequisiteStepIds_.isEmpty()) { + if (prerequisiteStepIds_.isEmpty()) { + prerequisiteStepIds_ = other.prerequisiteStepIds_; + bitField0_ = (bitField0_ & ~0x00000200); + } else { + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.addAll(other.prerequisiteStepIds_); + } + onChanged(); + } + switch (other.getJobTypeCase()) { + case HADOOP_JOB: { + mergeHadoopJob(other.getHadoopJob()); + break; + } + case SPARK_JOB: { + mergeSparkJob(other.getSparkJob()); + break; + } + case PYSPARK_JOB: { + mergePysparkJob(other.getPysparkJob()); + break; + } + case HIVE_JOB: { + mergeHiveJob(other.getHiveJob()); + break; + } + case PIG_JOB: { + mergePigJob(other.getPigJob()); + break; + } + case SPARK_SQL_JOB: { + mergeSparkSqlJob(other.getSparkSqlJob()); + break; + } + case JOBTYPE_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.OrderedJob parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.OrderedJob) 
e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int jobTypeCase_ = 0; + private java.lang.Object jobType_; + public JobTypeCase + getJobTypeCase() { + return JobTypeCase.forNumber( + jobTypeCase_); + } + + public Builder clearJobType() { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + return this; + } + + private int bitField0_; + + private java.lang.Object stepId_ = ""; + /** + *
+     * Required. The step id. The id must be unique among all jobs
+     * within the template.
+     * The step id is used as prefix for job id, as job
+     * `goog-dataproc-workflow-step-id` label, and in
+     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+     * steps.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string step_id = 1; + */ + public java.lang.String getStepId() { + java.lang.Object ref = stepId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + stepId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The step id. The id must be unique among all jobs
+     * within the template.
+     * The step id is used as prefix for job id, as job
+     * `goog-dataproc-workflow-step-id` label, and in
+     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+     * steps.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string step_id = 1; + */ + public com.google.protobuf.ByteString + getStepIdBytes() { + java.lang.Object ref = stepId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stepId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The step id. The id must be unique among all jobs
+     * within the template.
+     * The step id is used as prefix for job id, as job
+     * `goog-dataproc-workflow-step-id` label, and in
+     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+     * steps.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string step_id = 1; + */ + public Builder setStepId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The step id. The id must be unique among all jobs
+     * within the template.
+     * The step id is used as prefix for job id, as job
+     * `goog-dataproc-workflow-step-id` label, and in
+     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+     * steps.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = getDefaultInstance().getStepId(); + onChanged(); + return this; + } + /** + *
+     * Required. The step id. The id must be unique among all jobs
+     * within the template.
+     * The step id is used as prefix for job id, as job
+     * `goog-dataproc-workflow-step-id` label, and in
+     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+     * steps.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string step_id = 1; + */ + public Builder setStepIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + stepId_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.HadoopJob, com.google.cloud.dataproc.v1.HadoopJob.Builder, com.google.cloud.dataproc.v1.HadoopJobOrBuilder> hadoopJobBuilder_; + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public boolean hasHadoopJob() { + return jobTypeCase_ == 2; + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() { + if (hadoopJobBuilder_ == null) { + if (jobTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.HadoopJob) jobType_; + } + return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance(); + } else { + if (jobTypeCase_ == 2) { + return hadoopJobBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) { + if (hadoopJobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + jobType_ = value; + onChanged(); + } else { + hadoopJobBuilder_.setMessage(value); + } + jobTypeCase_ = 2; + return this; + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public Builder setHadoopJob( + com.google.cloud.dataproc.v1.HadoopJob.Builder builderForValue) { + if (hadoopJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + hadoopJobBuilder_.setMessage(builderForValue.build()); + } + jobTypeCase_ = 2; + return this; + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public Builder mergeHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) { + if (hadoopJobBuilder_ == null) { + if (jobTypeCase_ == 2 && + jobType_ != com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance()) { + jobType_ = com.google.cloud.dataproc.v1.HadoopJob.newBuilder((com.google.cloud.dataproc.v1.HadoopJob) jobType_) + .mergeFrom(value).buildPartial(); + } else { + jobType_ = value; + } + onChanged(); + } else { + if (jobTypeCase_ == 2) { + hadoopJobBuilder_.mergeFrom(value); + } + hadoopJobBuilder_.setMessage(value); + } + jobTypeCase_ = 2; + return this; + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public Builder clearHadoopJob() { + if (hadoopJobBuilder_ == null) { + if (jobTypeCase_ == 2) { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + } + } else { + if (jobTypeCase_ == 2) { + jobTypeCase_ = 0; + jobType_ = null; + } + hadoopJobBuilder_.clear(); + } + return this; + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public com.google.cloud.dataproc.v1.HadoopJob.Builder getHadoopJobBuilder() { + return getHadoopJobFieldBuilder().getBuilder(); + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() { + if ((jobTypeCase_ == 2) && (hadoopJobBuilder_ != null)) { + return hadoopJobBuilder_.getMessageOrBuilder(); + } else { + if (jobTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.HadoopJob) jobType_; + } + return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Hadoop job.
+     * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.HadoopJob, com.google.cloud.dataproc.v1.HadoopJob.Builder, com.google.cloud.dataproc.v1.HadoopJobOrBuilder> + getHadoopJobFieldBuilder() { + if (hadoopJobBuilder_ == null) { + if (!(jobTypeCase_ == 2)) { + jobType_ = com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance(); + } + hadoopJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.HadoopJob, com.google.cloud.dataproc.v1.HadoopJob.Builder, com.google.cloud.dataproc.v1.HadoopJobOrBuilder>( + (com.google.cloud.dataproc.v1.HadoopJob) jobType_, + getParentForChildren(), + isClean()); + jobType_ = null; + } + jobTypeCase_ = 2; + onChanged();; + return hadoopJobBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.SparkJob, com.google.cloud.dataproc.v1.SparkJob.Builder, com.google.cloud.dataproc.v1.SparkJobOrBuilder> sparkJobBuilder_; + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public boolean hasSparkJob() { + return jobTypeCase_ == 3; + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public com.google.cloud.dataproc.v1.SparkJob getSparkJob() { + if (sparkJobBuilder_ == null) { + if (jobTypeCase_ == 3) { + return (com.google.cloud.dataproc.v1.SparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance(); + } else { + if (jobTypeCase_ == 3) { + return sparkJobBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob value) { + if (sparkJobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + jobType_ = value; + onChanged(); + } else { + sparkJobBuilder_.setMessage(value); + } + jobTypeCase_ = 3; + return this; + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public Builder setSparkJob( + com.google.cloud.dataproc.v1.SparkJob.Builder builderForValue) { + if (sparkJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + sparkJobBuilder_.setMessage(builderForValue.build()); + } + jobTypeCase_ = 3; + return this; + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public Builder mergeSparkJob(com.google.cloud.dataproc.v1.SparkJob value) { + if (sparkJobBuilder_ == null) { + if (jobTypeCase_ == 3 && + jobType_ != com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance()) { + jobType_ = com.google.cloud.dataproc.v1.SparkJob.newBuilder((com.google.cloud.dataproc.v1.SparkJob) jobType_) + .mergeFrom(value).buildPartial(); + } else { + jobType_ = value; + } + onChanged(); + } else { + if (jobTypeCase_ == 3) { + sparkJobBuilder_.mergeFrom(value); + } + sparkJobBuilder_.setMessage(value); + } + jobTypeCase_ = 3; + return this; + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public Builder clearSparkJob() { + if (sparkJobBuilder_ == null) { + if (jobTypeCase_ == 3) { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + } + } else { + if (jobTypeCase_ == 3) { + jobTypeCase_ = 0; + jobType_ = null; + } + sparkJobBuilder_.clear(); + } + return this; + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public com.google.cloud.dataproc.v1.SparkJob.Builder getSparkJobBuilder() { + return getSparkJobFieldBuilder().getBuilder(); + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() { + if ((jobTypeCase_ == 3) && (sparkJobBuilder_ != null)) { + return sparkJobBuilder_.getMessageOrBuilder(); + } else { + if (jobTypeCase_ == 3) { + return (com.google.cloud.dataproc.v1.SparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Spark job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.SparkJob, com.google.cloud.dataproc.v1.SparkJob.Builder, com.google.cloud.dataproc.v1.SparkJobOrBuilder> + getSparkJobFieldBuilder() { + if (sparkJobBuilder_ == null) { + if (!(jobTypeCase_ == 3)) { + jobType_ = com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance(); + } + sparkJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.SparkJob, com.google.cloud.dataproc.v1.SparkJob.Builder, com.google.cloud.dataproc.v1.SparkJobOrBuilder>( + (com.google.cloud.dataproc.v1.SparkJob) jobType_, + getParentForChildren(), + isClean()); + jobType_ = null; + } + jobTypeCase_ = 3; + onChanged();; + return sparkJobBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.PySparkJob, com.google.cloud.dataproc.v1.PySparkJob.Builder, com.google.cloud.dataproc.v1.PySparkJobOrBuilder> pysparkJobBuilder_; + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public boolean hasPysparkJob() { + return jobTypeCase_ == 4; + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() { + if (pysparkJobBuilder_ == null) { + if (jobTypeCase_ == 4) { + return (com.google.cloud.dataproc.v1.PySparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance(); + } else { + if (jobTypeCase_ == 4) { + return pysparkJobBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) { + if (pysparkJobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + jobType_ = value; + onChanged(); + } else { + pysparkJobBuilder_.setMessage(value); + } + jobTypeCase_ = 4; + return this; + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public Builder setPysparkJob( + com.google.cloud.dataproc.v1.PySparkJob.Builder builderForValue) { + if (pysparkJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + pysparkJobBuilder_.setMessage(builderForValue.build()); + } + jobTypeCase_ = 4; + return this; + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public Builder mergePysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) { + if (pysparkJobBuilder_ == null) { + if (jobTypeCase_ == 4 && + jobType_ != com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance()) { + jobType_ = com.google.cloud.dataproc.v1.PySparkJob.newBuilder((com.google.cloud.dataproc.v1.PySparkJob) jobType_) + .mergeFrom(value).buildPartial(); + } else { + jobType_ = value; + } + onChanged(); + } else { + if (jobTypeCase_ == 4) { + pysparkJobBuilder_.mergeFrom(value); + } + pysparkJobBuilder_.setMessage(value); + } + jobTypeCase_ = 4; + return this; + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public Builder clearPysparkJob() { + if (pysparkJobBuilder_ == null) { + if (jobTypeCase_ == 4) { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + } + } else { + if (jobTypeCase_ == 4) { + jobTypeCase_ = 0; + jobType_ = null; + } + pysparkJobBuilder_.clear(); + } + return this; + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public com.google.cloud.dataproc.v1.PySparkJob.Builder getPysparkJobBuilder() { + return getPysparkJobFieldBuilder().getBuilder(); + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() { + if ((jobTypeCase_ == 4) && (pysparkJobBuilder_ != null)) { + return pysparkJobBuilder_.getMessageOrBuilder(); + } else { + if (jobTypeCase_ == 4) { + return (com.google.cloud.dataproc.v1.PySparkJob) jobType_; + } + return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Pyspark job.
+     * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.PySparkJob, com.google.cloud.dataproc.v1.PySparkJob.Builder, com.google.cloud.dataproc.v1.PySparkJobOrBuilder> + getPysparkJobFieldBuilder() { + if (pysparkJobBuilder_ == null) { + if (!(jobTypeCase_ == 4)) { + jobType_ = com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance(); + } + pysparkJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.PySparkJob, com.google.cloud.dataproc.v1.PySparkJob.Builder, com.google.cloud.dataproc.v1.PySparkJobOrBuilder>( + (com.google.cloud.dataproc.v1.PySparkJob) jobType_, + getParentForChildren(), + isClean()); + jobType_ = null; + } + jobTypeCase_ = 4; + onChanged();; + return pysparkJobBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.HiveJob, com.google.cloud.dataproc.v1.HiveJob.Builder, com.google.cloud.dataproc.v1.HiveJobOrBuilder> hiveJobBuilder_; + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public boolean hasHiveJob() { + return jobTypeCase_ == 5; + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public com.google.cloud.dataproc.v1.HiveJob getHiveJob() { + if (hiveJobBuilder_ == null) { + if (jobTypeCase_ == 5) { + return (com.google.cloud.dataproc.v1.HiveJob) jobType_; + } + return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance(); + } else { + if (jobTypeCase_ == 5) { + return hiveJobBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob value) { + if (hiveJobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + jobType_ = value; + onChanged(); + } else { + hiveJobBuilder_.setMessage(value); + } + jobTypeCase_ = 5; + return this; + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public Builder setHiveJob( + com.google.cloud.dataproc.v1.HiveJob.Builder builderForValue) { + if (hiveJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + hiveJobBuilder_.setMessage(builderForValue.build()); + } + jobTypeCase_ = 5; + return this; + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public Builder mergeHiveJob(com.google.cloud.dataproc.v1.HiveJob value) { + if (hiveJobBuilder_ == null) { + if (jobTypeCase_ == 5 && + jobType_ != com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance()) { + jobType_ = com.google.cloud.dataproc.v1.HiveJob.newBuilder((com.google.cloud.dataproc.v1.HiveJob) jobType_) + .mergeFrom(value).buildPartial(); + } else { + jobType_ = value; + } + onChanged(); + } else { + if (jobTypeCase_ == 5) { + hiveJobBuilder_.mergeFrom(value); + } + hiveJobBuilder_.setMessage(value); + } + jobTypeCase_ = 5; + return this; + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public Builder clearHiveJob() { + if (hiveJobBuilder_ == null) { + if (jobTypeCase_ == 5) { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + } + } else { + if (jobTypeCase_ == 5) { + jobTypeCase_ = 0; + jobType_ = null; + } + hiveJobBuilder_.clear(); + } + return this; + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public com.google.cloud.dataproc.v1.HiveJob.Builder getHiveJobBuilder() { + return getHiveJobFieldBuilder().getBuilder(); + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() { + if ((jobTypeCase_ == 5) && (hiveJobBuilder_ != null)) { + return hiveJobBuilder_.getMessageOrBuilder(); + } else { + if (jobTypeCase_ == 5) { + return (com.google.cloud.dataproc.v1.HiveJob) jobType_; + } + return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Hive job.
+     * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.HiveJob, com.google.cloud.dataproc.v1.HiveJob.Builder, com.google.cloud.dataproc.v1.HiveJobOrBuilder> + getHiveJobFieldBuilder() { + if (hiveJobBuilder_ == null) { + if (!(jobTypeCase_ == 5)) { + jobType_ = com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance(); + } + hiveJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.HiveJob, com.google.cloud.dataproc.v1.HiveJob.Builder, com.google.cloud.dataproc.v1.HiveJobOrBuilder>( + (com.google.cloud.dataproc.v1.HiveJob) jobType_, + getParentForChildren(), + isClean()); + jobType_ = null; + } + jobTypeCase_ = 5; + onChanged();; + return hiveJobBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.PigJob, com.google.cloud.dataproc.v1.PigJob.Builder, com.google.cloud.dataproc.v1.PigJobOrBuilder> pigJobBuilder_; + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public boolean hasPigJob() { + return jobTypeCase_ == 6; + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public com.google.cloud.dataproc.v1.PigJob getPigJob() { + if (pigJobBuilder_ == null) { + if (jobTypeCase_ == 6) { + return (com.google.cloud.dataproc.v1.PigJob) jobType_; + } + return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance(); + } else { + if (jobTypeCase_ == 6) { + return pigJobBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob value) { + if (pigJobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + jobType_ = value; + onChanged(); + } else { + pigJobBuilder_.setMessage(value); + } + jobTypeCase_ = 6; + return this; + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public Builder setPigJob( + com.google.cloud.dataproc.v1.PigJob.Builder builderForValue) { + if (pigJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + pigJobBuilder_.setMessage(builderForValue.build()); + } + jobTypeCase_ = 6; + return this; + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public Builder mergePigJob(com.google.cloud.dataproc.v1.PigJob value) { + if (pigJobBuilder_ == null) { + if (jobTypeCase_ == 6 && + jobType_ != com.google.cloud.dataproc.v1.PigJob.getDefaultInstance()) { + jobType_ = com.google.cloud.dataproc.v1.PigJob.newBuilder((com.google.cloud.dataproc.v1.PigJob) jobType_) + .mergeFrom(value).buildPartial(); + } else { + jobType_ = value; + } + onChanged(); + } else { + if (jobTypeCase_ == 6) { + pigJobBuilder_.mergeFrom(value); + } + pigJobBuilder_.setMessage(value); + } + jobTypeCase_ = 6; + return this; + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public Builder clearPigJob() { + if (pigJobBuilder_ == null) { + if (jobTypeCase_ == 6) { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + } + } else { + if (jobTypeCase_ == 6) { + jobTypeCase_ = 0; + jobType_ = null; + } + pigJobBuilder_.clear(); + } + return this; + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public com.google.cloud.dataproc.v1.PigJob.Builder getPigJobBuilder() { + return getPigJobFieldBuilder().getBuilder(); + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() { + if ((jobTypeCase_ == 6) && (pigJobBuilder_ != null)) { + return pigJobBuilder_.getMessageOrBuilder(); + } else { + if (jobTypeCase_ == 6) { + return (com.google.cloud.dataproc.v1.PigJob) jobType_; + } + return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a Pig job.
+     * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.PigJob, com.google.cloud.dataproc.v1.PigJob.Builder, com.google.cloud.dataproc.v1.PigJobOrBuilder> + getPigJobFieldBuilder() { + if (pigJobBuilder_ == null) { + if (!(jobTypeCase_ == 6)) { + jobType_ = com.google.cloud.dataproc.v1.PigJob.getDefaultInstance(); + } + pigJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.PigJob, com.google.cloud.dataproc.v1.PigJob.Builder, com.google.cloud.dataproc.v1.PigJobOrBuilder>( + (com.google.cloud.dataproc.v1.PigJob) jobType_, + getParentForChildren(), + isClean()); + jobType_ = null; + } + jobTypeCase_ = 6; + onChanged();; + return pigJobBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.SparkSqlJob, com.google.cloud.dataproc.v1.SparkSqlJob.Builder, com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder> sparkSqlJobBuilder_; + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public boolean hasSparkSqlJob() { + return jobTypeCase_ == 7; + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() { + if (sparkSqlJobBuilder_ == null) { + if (jobTypeCase_ == 7) { + return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance(); + } else { + if (jobTypeCase_ == 7) { + return sparkSqlJobBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public Builder setSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) { + if (sparkSqlJobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + jobType_ = value; + onChanged(); + } else { + sparkSqlJobBuilder_.setMessage(value); + } + jobTypeCase_ = 7; + return this; + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public Builder setSparkSqlJob( + com.google.cloud.dataproc.v1.SparkSqlJob.Builder builderForValue) { + if (sparkSqlJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + sparkSqlJobBuilder_.setMessage(builderForValue.build()); + } + jobTypeCase_ = 7; + return this; + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public Builder mergeSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) { + if (sparkSqlJobBuilder_ == null) { + if (jobTypeCase_ == 7 && + jobType_ != com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance()) { + jobType_ = com.google.cloud.dataproc.v1.SparkSqlJob.newBuilder((com.google.cloud.dataproc.v1.SparkSqlJob) jobType_) + .mergeFrom(value).buildPartial(); + } else { + jobType_ = value; + } + onChanged(); + } else { + if (jobTypeCase_ == 7) { + sparkSqlJobBuilder_.mergeFrom(value); + } + sparkSqlJobBuilder_.setMessage(value); + } + jobTypeCase_ = 7; + return this; + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public Builder clearSparkSqlJob() { + if (sparkSqlJobBuilder_ == null) { + if (jobTypeCase_ == 7) { + jobTypeCase_ = 0; + jobType_ = null; + onChanged(); + } + } else { + if (jobTypeCase_ == 7) { + jobTypeCase_ = 0; + jobType_ = null; + } + sparkSqlJobBuilder_.clear(); + } + return this; + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public com.google.cloud.dataproc.v1.SparkSqlJob.Builder getSparkSqlJobBuilder() { + return getSparkSqlJobFieldBuilder().getBuilder(); + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() { + if ((jobTypeCase_ == 7) && (sparkSqlJobBuilder_ != null)) { + return sparkSqlJobBuilder_.getMessageOrBuilder(); + } else { + if (jobTypeCase_ == 7) { + return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_; + } + return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance(); + } + } + /** + *
+     * Job is a SparkSql job.
+     * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.SparkSqlJob, com.google.cloud.dataproc.v1.SparkSqlJob.Builder, com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder> + getSparkSqlJobFieldBuilder() { + if (sparkSqlJobBuilder_ == null) { + if (!(jobTypeCase_ == 7)) { + jobType_ = com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance(); + } + sparkSqlJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.SparkSqlJob, com.google.cloud.dataproc.v1.SparkSqlJob.Builder, com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>( + (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_, + getParentForChildren(), + isClean()); + jobType_ = null; + } + jobTypeCase_ = 7; + onChanged();; + return sparkSqlJobBuilder_; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> labels_; + private com.google.protobuf.MapField + internalGetLabels() { + if (labels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + return labels_; + } + private com.google.protobuf.MapField + internalGetMutableLabels() { + onChanged();; + if (labels_ == null) { + labels_ = com.google.protobuf.MapField.newMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + if (!labels_.isMutable()) { + labels_ = labels_.copy(); + } + return labels_; + } + + public int getLabelsCount() { + return internalGetLabels().getMap().size(); + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + + public boolean containsLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabels().getMap().containsKey(key); + } + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabels() { + return getLabelsMap(); + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + + public java.util.Map getLabelsMap() { + return internalGetLabels().getMap(); + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + + public java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + + public java.lang.String getLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearLabels() { + internalGetMutableLabels().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + + public Builder removeLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableLabels().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableLabels() { + return internalGetMutableLabels().getMutableMap(); + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + public Builder putLabels( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableLabels().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Optional. The labels to associate with this job.
+     * Label keys must be between 1 and 63 characters long, and must conform to
+     * the following regular expression:
+     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+     * Label values must be between 1 and 63 characters long, and must conform to
+     * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+     * No more than 32 labels can be associated with a given job.
+     * 
+ * + * map<string, string> labels = 8; + */ + + public Builder putAllLabels( + java.util.Map values) { + internalGetMutableLabels().getMutableMap() + .putAll(values); + return this; + } + + private com.google.cloud.dataproc.v1.JobScheduling scheduling_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.JobScheduling, com.google.cloud.dataproc.v1.JobScheduling.Builder, com.google.cloud.dataproc.v1.JobSchedulingOrBuilder> schedulingBuilder_; + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public boolean hasScheduling() { + return schedulingBuilder_ != null || scheduling_ != null; + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public com.google.cloud.dataproc.v1.JobScheduling getScheduling() { + if (schedulingBuilder_ == null) { + return scheduling_ == null ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance() : scheduling_; + } else { + return schedulingBuilder_.getMessage(); + } + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public Builder setScheduling(com.google.cloud.dataproc.v1.JobScheduling value) { + if (schedulingBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scheduling_ = value; + onChanged(); + } else { + schedulingBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public Builder setScheduling( + com.google.cloud.dataproc.v1.JobScheduling.Builder builderForValue) { + if (schedulingBuilder_ == null) { + scheduling_ = builderForValue.build(); + onChanged(); + } else { + schedulingBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public Builder mergeScheduling(com.google.cloud.dataproc.v1.JobScheduling value) { + if (schedulingBuilder_ == null) { + if (scheduling_ != null) { + scheduling_ = + com.google.cloud.dataproc.v1.JobScheduling.newBuilder(scheduling_).mergeFrom(value).buildPartial(); + } else { + scheduling_ = value; + } + onChanged(); + } else { + schedulingBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public Builder clearScheduling() { + if (schedulingBuilder_ == null) { + scheduling_ = null; + onChanged(); + } else { + scheduling_ = null; + schedulingBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public com.google.cloud.dataproc.v1.JobScheduling.Builder getSchedulingBuilder() { + + onChanged(); + return getSchedulingFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() { + if (schedulingBuilder_ != null) { + return schedulingBuilder_.getMessageOrBuilder(); + } else { + return scheduling_ == null ? + com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance() : scheduling_; + } + } + /** + *
+     * Optional. Job scheduling configuration.
+     * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.JobScheduling, com.google.cloud.dataproc.v1.JobScheduling.Builder, com.google.cloud.dataproc.v1.JobSchedulingOrBuilder> + getSchedulingFieldBuilder() { + if (schedulingBuilder_ == null) { + schedulingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.JobScheduling, com.google.cloud.dataproc.v1.JobScheduling.Builder, com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>( + getScheduling(), + getParentForChildren(), + isClean()); + scheduling_ = null; + } + return schedulingBuilder_; + } + + private com.google.protobuf.LazyStringList prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensurePrerequisiteStepIdsIsMutable() { + if (!((bitField0_ & 0x00000200) == 0x00000200)) { + prerequisiteStepIds_ = new com.google.protobuf.LazyStringArrayList(prerequisiteStepIds_); + bitField0_ |= 0x00000200; + } + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public com.google.protobuf.ProtocolStringList + getPrerequisiteStepIdsList() { + return prerequisiteStepIds_.getUnmodifiableView(); + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public int getPrerequisiteStepIdsCount() { + return prerequisiteStepIds_.size(); + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public java.lang.String getPrerequisiteStepIds(int index) { + return prerequisiteStepIds_.get(index); + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public com.google.protobuf.ByteString + getPrerequisiteStepIdsBytes(int index) { + return prerequisiteStepIds_.getByteString(index); + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public Builder setPrerequisiteStepIds( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public Builder addPrerequisiteStepIds( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.add(value); + onChanged(); + return this; + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public Builder addAllPrerequisiteStepIds( + java.lang.Iterable values) { + ensurePrerequisiteStepIdsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, prerequisiteStepIds_); + onChanged(); + return this; + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public Builder clearPrerequisiteStepIds() { + prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000200); + onChanged(); + return this; + } + /** + *
+     * Optional. The optional list of prerequisite job step_ids.
+     * If not specified, the job will start at the beginning of workflow.
+     * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + public Builder addPrerequisiteStepIdsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.add(value); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.OrderedJob) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.OrderedJob) + private static final com.google.cloud.dataproc.v1.OrderedJob DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.OrderedJob(); + } + + public static com.google.cloud.dataproc.v1.OrderedJob getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public OrderedJob parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OrderedJob(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.OrderedJob getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJobOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJobOrBuilder.java new file mode 100644 index 000000000000..8117cc3b77bd --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/OrderedJobOrBuilder.java @@ -0,0 +1,343 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface OrderedJobOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.OrderedJob) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The step id. The id must be unique among all jobs
+   * within the template.
+   * The step id is used as prefix for job id, as job
+   * `goog-dataproc-workflow-step-id` label, and in
+   * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+   * steps.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string step_id = 1; + */ + java.lang.String getStepId(); + /** + *
+   * Required. The step id. The id must be unique among all jobs
+   * within the template.
+   * The step id is used as prefix for job id, as job
+   * `goog-dataproc-workflow-step-id` label, and in
+   * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+   * steps.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string step_id = 1; + */ + com.google.protobuf.ByteString + getStepIdBytes(); + + /** + *
+   * Job is a Hadoop job.
+   * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + boolean hasHadoopJob(); + /** + *
+   * Job is a Hadoop job.
+   * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + com.google.cloud.dataproc.v1.HadoopJob getHadoopJob(); + /** + *
+   * Job is a Hadoop job.
+   * 
+ * + * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2; + */ + com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder(); + + /** + *
+   * Job is a Spark job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + boolean hasSparkJob(); + /** + *
+   * Job is a Spark job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + com.google.cloud.dataproc.v1.SparkJob getSparkJob(); + /** + *
+   * Job is a Spark job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkJob spark_job = 3; + */ + com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder(); + + /** + *
+   * Job is a Pyspark job.
+   * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + boolean hasPysparkJob(); + /** + *
+   * Job is a Pyspark job.
+   * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + com.google.cloud.dataproc.v1.PySparkJob getPysparkJob(); + /** + *
+   * Job is a Pyspark job.
+   * 
+ * + * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4; + */ + com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder(); + + /** + *
+   * Job is a Hive job.
+   * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + boolean hasHiveJob(); + /** + *
+   * Job is a Hive job.
+   * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + com.google.cloud.dataproc.v1.HiveJob getHiveJob(); + /** + *
+   * Job is a Hive job.
+   * 
+ * + * .google.cloud.dataproc.v1.HiveJob hive_job = 5; + */ + com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder(); + + /** + *
+   * Job is a Pig job.
+   * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + boolean hasPigJob(); + /** + *
+   * Job is a Pig job.
+   * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + com.google.cloud.dataproc.v1.PigJob getPigJob(); + /** + *
+   * Job is a Pig job.
+   * 
+ * + * .google.cloud.dataproc.v1.PigJob pig_job = 6; + */ + com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder(); + + /** + *
+   * Job is a SparkSql job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + boolean hasSparkSqlJob(); + /** + *
+   * Job is a SparkSql job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob(); + /** + *
+   * Job is a SparkSql job.
+   * 
+ * + * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7; + */ + com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder(); + + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + int getLabelsCount(); + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + boolean containsLabels( + java.lang.String key); + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getLabels(); + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + java.util.Map + getLabelsMap(); + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + + java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Optional. The labels to associate with this job.
+   * Label keys must be between 1 and 63 characters long, and must conform to
+   * the following regular expression:
+   * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+   * Label values must be between 1 and 63 characters long, and must conform to
+   * the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+   * No more than 32 labels can be associated with a given job.
+   * 
+ * + * map<string, string> labels = 8; + */ + + java.lang.String getLabelsOrThrow( + java.lang.String key); + + /** + *
+   * Optional. Job scheduling configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + boolean hasScheduling(); + /** + *
+   * Optional. Job scheduling configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + com.google.cloud.dataproc.v1.JobScheduling getScheduling(); + /** + *
+   * Optional. Job scheduling configuration.
+   * 
+ * + * .google.cloud.dataproc.v1.JobScheduling scheduling = 9; + */ + com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder(); + + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + java.util.List + getPrerequisiteStepIdsList(); + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + int getPrerequisiteStepIdsCount(); + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + java.lang.String getPrerequisiteStepIds(int index); + /** + *
+   * Optional. The optional list of prerequisite job step_ids.
+   * If not specified, the job will start at the beginning of workflow.
+   * 
+ * + * repeated string prerequisite_step_ids = 10; + */ + com.google.protobuf.ByteString + getPrerequisiteStepIdsBytes(int index); + + public com.google.cloud.dataproc.v1.OrderedJob.JobTypeCase getJobTypeCase(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidation.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidation.java new file mode 100644 index 000000000000..e26720a6003b --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidation.java @@ -0,0 +1,994 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * Configuration for parameter validation.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ParameterValidation} + */ +public final class ParameterValidation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ParameterValidation) + ParameterValidationOrBuilder { +private static final long serialVersionUID = 0L; + // Use ParameterValidation.newBuilder() to construct. + private ParameterValidation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ParameterValidation() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ParameterValidation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.cloud.dataproc.v1.RegexValidation.Builder subBuilder = null; + if (validationTypeCase_ == 1) { + subBuilder = ((com.google.cloud.dataproc.v1.RegexValidation) validationType_).toBuilder(); + } + validationType_ = + input.readMessage(com.google.cloud.dataproc.v1.RegexValidation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.RegexValidation) validationType_); + validationType_ = subBuilder.buildPartial(); + } + validationTypeCase_ = 1; + break; + } + case 18: { + com.google.cloud.dataproc.v1.ValueValidation.Builder subBuilder = null; + if (validationTypeCase_ == 2) { + subBuilder = ((com.google.cloud.dataproc.v1.ValueValidation) 
validationType_).toBuilder(); + } + validationType_ = + input.readMessage(com.google.cloud.dataproc.v1.ValueValidation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.ValueValidation) validationType_); + validationType_ = subBuilder.buildPartial(); + } + validationTypeCase_ = 2; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ParameterValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ParameterValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ParameterValidation.class, com.google.cloud.dataproc.v1.ParameterValidation.Builder.class); + } + + private int validationTypeCase_ = 0; + private java.lang.Object validationType_; + public enum ValidationTypeCase + implements com.google.protobuf.Internal.EnumLite { + REGEX(1), + VALUES(2), + VALIDATIONTYPE_NOT_SET(0); + private final int value; + private ValidationTypeCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static ValidationTypeCase valueOf(int value) { + return forNumber(value); + } + + public static ValidationTypeCase forNumber(int value) { + switch (value) { + case 1: return REGEX; + case 2: return VALUES; + case 0: return VALIDATIONTYPE_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public ValidationTypeCase + getValidationTypeCase() { + return ValidationTypeCase.forNumber( + validationTypeCase_); + } + + public static final int REGEX_FIELD_NUMBER = 1; + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public boolean hasRegex() { + return validationTypeCase_ == 1; + } + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1.RegexValidation getRegex() { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); + } + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1.RegexValidationOrBuilder getRegexOrBuilder() { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); + } + + public static final int VALUES_FIELD_NUMBER = 2; + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public boolean hasValues() { + return validationTypeCase_ == 2; + } + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1.ValueValidation getValues() { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); + } + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1.ValueValidationOrBuilder getValuesOrBuilder() { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (validationTypeCase_ == 1) { + output.writeMessage(1, (com.google.cloud.dataproc.v1.RegexValidation) validationType_); + } + if (validationTypeCase_ == 2) { + output.writeMessage(2, (com.google.cloud.dataproc.v1.ValueValidation) validationType_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (validationTypeCase_ == 1) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, (com.google.cloud.dataproc.v1.RegexValidation) validationType_); + } + if (validationTypeCase_ == 2) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, (com.google.cloud.dataproc.v1.ValueValidation) validationType_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ParameterValidation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.ParameterValidation other = (com.google.cloud.dataproc.v1.ParameterValidation) obj; + + boolean result = true; + result = result 
&& getValidationTypeCase().equals( + other.getValidationTypeCase()); + if (!result) return false; + switch (validationTypeCase_) { + case 1: + result = result && getRegex() + .equals(other.getRegex()); + break; + case 2: + result = result && getValues() + .equals(other.getValues()); + break; + case 0: + default: + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + switch (validationTypeCase_) { + case 1: + hash = (37 * hash) + REGEX_FIELD_NUMBER; + hash = (53 * hash) + getRegex().hashCode(); + break; + case 2: + hash = (37 * hash) + VALUES_FIELD_NUMBER; + hash = (53 * hash) + getValues().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.ParameterValidation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ParameterValidation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.ParameterValidation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Configuration for parameter validation.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ParameterValidation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ParameterValidation) + com.google.cloud.dataproc.v1.ParameterValidationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ParameterValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ParameterValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ParameterValidation.class, com.google.cloud.dataproc.v1.ParameterValidation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ParameterValidation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + validationTypeCase_ = 0; + validationType_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ParameterValidation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ParameterValidation getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ParameterValidation.getDefaultInstance(); + } + 
+ @java.lang.Override + public com.google.cloud.dataproc.v1.ParameterValidation build() { + com.google.cloud.dataproc.v1.ParameterValidation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ParameterValidation buildPartial() { + com.google.cloud.dataproc.v1.ParameterValidation result = new com.google.cloud.dataproc.v1.ParameterValidation(this); + if (validationTypeCase_ == 1) { + if (regexBuilder_ == null) { + result.validationType_ = validationType_; + } else { + result.validationType_ = regexBuilder_.build(); + } + } + if (validationTypeCase_ == 2) { + if (valuesBuilder_ == null) { + result.validationType_ = validationType_; + } else { + result.validationType_ = valuesBuilder_.build(); + } + } + result.validationTypeCase_ = validationTypeCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ParameterValidation) { + return mergeFrom((com.google.cloud.dataproc.v1.ParameterValidation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ParameterValidation other) { + if (other == com.google.cloud.dataproc.v1.ParameterValidation.getDefaultInstance()) return this; + switch (other.getValidationTypeCase()) { + case REGEX: { + mergeRegex(other.getRegex()); + break; + } + case VALUES: { + mergeValues(other.getValues()); + break; + } + case VALIDATIONTYPE_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ParameterValidation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ParameterValidation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int validationTypeCase_ = 0; + private java.lang.Object validationType_; + public ValidationTypeCase + getValidationTypeCase() { + return ValidationTypeCase.forNumber( + validationTypeCase_); + } + + public Builder clearValidationType() { + validationTypeCase_ = 0; + validationType_ = null; + onChanged(); + return this; + } + + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.RegexValidation, com.google.cloud.dataproc.v1.RegexValidation.Builder, 
com.google.cloud.dataproc.v1.RegexValidationOrBuilder> regexBuilder_; + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public boolean hasRegex() { + return validationTypeCase_ == 1; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1.RegexValidation getRegex() { + if (regexBuilder_ == null) { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); + } else { + if (validationTypeCase_ == 1) { + return regexBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public Builder setRegex(com.google.cloud.dataproc.v1.RegexValidation value) { + if (regexBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + validationType_ = value; + onChanged(); + } else { + regexBuilder_.setMessage(value); + } + validationTypeCase_ = 1; + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public Builder setRegex( + com.google.cloud.dataproc.v1.RegexValidation.Builder builderForValue) { + if (regexBuilder_ == null) { + validationType_ = builderForValue.build(); + onChanged(); + } else { + regexBuilder_.setMessage(builderForValue.build()); + } + validationTypeCase_ = 1; + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public Builder mergeRegex(com.google.cloud.dataproc.v1.RegexValidation value) { + if (regexBuilder_ == null) { + if (validationTypeCase_ == 1 && + validationType_ != com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance()) { + validationType_ = com.google.cloud.dataproc.v1.RegexValidation.newBuilder((com.google.cloud.dataproc.v1.RegexValidation) validationType_) + .mergeFrom(value).buildPartial(); + } else { + validationType_ = value; + } + onChanged(); + } else { + if (validationTypeCase_ == 1) { + regexBuilder_.mergeFrom(value); + } + regexBuilder_.setMessage(value); + } + validationTypeCase_ = 1; + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public Builder clearRegex() { + if (regexBuilder_ == null) { + if (validationTypeCase_ == 1) { + validationTypeCase_ = 0; + validationType_ = null; + onChanged(); + } + } else { + if (validationTypeCase_ == 1) { + validationTypeCase_ = 0; + validationType_ = null; + } + regexBuilder_.clear(); + } + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1.RegexValidation.Builder getRegexBuilder() { + return getRegexFieldBuilder().getBuilder(); + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1.RegexValidationOrBuilder getRegexOrBuilder() { + if ((validationTypeCase_ == 1) && (regexBuilder_ != null)) { + return regexBuilder_.getMessageOrBuilder(); + } else { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.RegexValidation, com.google.cloud.dataproc.v1.RegexValidation.Builder, com.google.cloud.dataproc.v1.RegexValidationOrBuilder> + getRegexFieldBuilder() { + if (regexBuilder_ == null) { + if (!(validationTypeCase_ == 1)) { + validationType_ = com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); + } + regexBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.RegexValidation, com.google.cloud.dataproc.v1.RegexValidation.Builder, com.google.cloud.dataproc.v1.RegexValidationOrBuilder>( + (com.google.cloud.dataproc.v1.RegexValidation) validationType_, + getParentForChildren(), + isClean()); + validationType_ = null; + } + validationTypeCase_ = 1; + onChanged();; + return regexBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ValueValidation, com.google.cloud.dataproc.v1.ValueValidation.Builder, com.google.cloud.dataproc.v1.ValueValidationOrBuilder> valuesBuilder_; + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public boolean hasValues() { + return validationTypeCase_ == 2; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1.ValueValidation getValues() { + if (valuesBuilder_ == null) { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); + } else { + if (validationTypeCase_ == 2) { + return valuesBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public Builder setValues(com.google.cloud.dataproc.v1.ValueValidation value) { + if (valuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + validationType_ = value; + onChanged(); + } else { + valuesBuilder_.setMessage(value); + } + validationTypeCase_ = 2; + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public Builder setValues( + com.google.cloud.dataproc.v1.ValueValidation.Builder builderForValue) { + if (valuesBuilder_ == null) { + validationType_ = builderForValue.build(); + onChanged(); + } else { + valuesBuilder_.setMessage(builderForValue.build()); + } + validationTypeCase_ = 2; + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public Builder mergeValues(com.google.cloud.dataproc.v1.ValueValidation value) { + if (valuesBuilder_ == null) { + if (validationTypeCase_ == 2 && + validationType_ != com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance()) { + validationType_ = com.google.cloud.dataproc.v1.ValueValidation.newBuilder((com.google.cloud.dataproc.v1.ValueValidation) validationType_) + .mergeFrom(value).buildPartial(); + } else { + validationType_ = value; + } + onChanged(); + } else { + if (validationTypeCase_ == 2) { + valuesBuilder_.mergeFrom(value); + } + valuesBuilder_.setMessage(value); + } + validationTypeCase_ = 2; + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public Builder clearValues() { + if (valuesBuilder_ == null) { + if (validationTypeCase_ == 2) { + validationTypeCase_ = 0; + validationType_ = null; + onChanged(); + } + } else { + if (validationTypeCase_ == 2) { + validationTypeCase_ = 0; + validationType_ = null; + } + valuesBuilder_.clear(); + } + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1.ValueValidation.Builder getValuesBuilder() { + return getValuesFieldBuilder().getBuilder(); + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1.ValueValidationOrBuilder getValuesOrBuilder() { + if ((validationTypeCase_ == 2) && (valuesBuilder_ != null)) { + return valuesBuilder_.getMessageOrBuilder(); + } else { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ValueValidation, com.google.cloud.dataproc.v1.ValueValidation.Builder, com.google.cloud.dataproc.v1.ValueValidationOrBuilder> + getValuesFieldBuilder() { + if (valuesBuilder_ == null) { + if (!(validationTypeCase_ == 2)) { + validationType_ = com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); + } + valuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ValueValidation, com.google.cloud.dataproc.v1.ValueValidation.Builder, com.google.cloud.dataproc.v1.ValueValidationOrBuilder>( + (com.google.cloud.dataproc.v1.ValueValidation) validationType_, + getParentForChildren(), + isClean()); + validationType_ = null; + } + validationTypeCase_ = 2; + onChanged();; + return valuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ParameterValidation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ParameterValidation) + private static final com.google.cloud.dataproc.v1.ParameterValidation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ParameterValidation(); + } + + public static com.google.cloud.dataproc.v1.ParameterValidation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ParameterValidation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ParameterValidation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ParameterValidation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidationOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidationOrBuilder.java new file mode 100644 index 000000000000..4ff3c0a55e86 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ParameterValidationOrBuilder.java @@ -0,0 +1,61 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ParameterValidationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ParameterValidation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + boolean hasRegex(); + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + com.google.cloud.dataproc.v1.RegexValidation getRegex(); + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1.RegexValidation regex = 1; + */ + com.google.cloud.dataproc.v1.RegexValidationOrBuilder getRegexOrBuilder(); + + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + boolean hasValues(); + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + com.google.cloud.dataproc.v1.ValueValidation getValues(); + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1.ValueValidation values = 2; + */ + com.google.cloud.dataproc.v1.ValueValidationOrBuilder getValuesOrBuilder(); + + public com.google.cloud.dataproc.v1.ParameterValidation.ValidationTypeCase getValidationTypeCase(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidation.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidation.java new file mode 100644 index 000000000000..29d853e1209c --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidation.java @@ -0,0 +1,672 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * Validation based on regular expressions.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.RegexValidation} + */ +public final class RegexValidation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.RegexValidation) + RegexValidationOrBuilder { +private static final long serialVersionUID = 0L; + // Use RegexValidation.newBuilder() to construct. + private RegexValidation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RegexValidation() { + regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegexValidation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + regexes_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + regexes_.add(s); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + 
regexes_ = regexes_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_RegexValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_RegexValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.RegexValidation.class, com.google.cloud.dataproc.v1.RegexValidation.Builder.class); + } + + public static final int REGEXES_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList regexes_; + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ProtocolStringList + getRegexesList() { + return regexes_; + } + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public int getRegexesCount() { + return regexes_.size(); + } + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public java.lang.String getRegexes(int index) { + return regexes_.get(index); + } + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ByteString + getRegexesBytes(int index) { + return regexes_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < regexes_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, regexes_.getRaw(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < regexes_.size(); i++) { + dataSize += computeStringSizeNoTag(regexes_.getRaw(i)); + } + size += dataSize; + size += 1 * getRegexesList().size(); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.RegexValidation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.RegexValidation other = (com.google.cloud.dataproc.v1.RegexValidation) obj; + + boolean result = true; + result = result && getRegexesList() + .equals(other.getRegexesList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getRegexesCount() > 0) { + hash = (37 * hash) + REGEXES_FIELD_NUMBER; + hash = (53 * hash) + getRegexesList().hashCode(); + } + hash = (29 * hash) + 
unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.RegexValidation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.RegexValidation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.RegexValidation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.RegexValidation) + com.google.cloud.dataproc.v1.RegexValidationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_RegexValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_RegexValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.RegexValidation.class, com.google.cloud.dataproc.v1.RegexValidation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.RegexValidation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_RegexValidation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.RegexValidation getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance(); 
+ } + + @java.lang.Override + public com.google.cloud.dataproc.v1.RegexValidation build() { + com.google.cloud.dataproc.v1.RegexValidation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.RegexValidation buildPartial() { + com.google.cloud.dataproc.v1.RegexValidation result = new com.google.cloud.dataproc.v1.RegexValidation(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regexes_ = regexes_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regexes_ = regexes_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.RegexValidation) { + return mergeFrom((com.google.cloud.dataproc.v1.RegexValidation)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.RegexValidation other) { + if (other == com.google.cloud.dataproc.v1.RegexValidation.getDefaultInstance()) return this; + if (!other.regexes_.isEmpty()) { + if (regexes_.isEmpty()) { + regexes_ = other.regexes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegexesIsMutable(); + regexes_.addAll(other.regexes_); + } + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.RegexValidation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.RegexValidation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringList regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureRegexesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regexes_ = new com.google.protobuf.LazyStringArrayList(regexes_); + bitField0_ |= 0x00000001; + } + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ProtocolStringList + getRegexesList() { + return regexes_.getUnmodifiableView(); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public int getRegexesCount() { + return regexes_.size(); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public java.lang.String getRegexes(int index) { + return regexes_.get(index); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ByteString + getRegexesBytes(int index) { + return regexes_.getByteString(index); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder setRegexes( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegexesIsMutable(); + regexes_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder addRegexes( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegexesIsMutable(); + regexes_.add(value); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder addAllRegexes( + java.lang.Iterable values) { + ensureRegexesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regexes_); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder clearRegexes() { + regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder addRegexesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureRegexesIsMutable(); + regexes_.add(value); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.RegexValidation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.RegexValidation) + private static final com.google.cloud.dataproc.v1.RegexValidation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.RegexValidation(); + } + + public static com.google.cloud.dataproc.v1.RegexValidation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public RegexValidation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegexValidation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.RegexValidation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidationOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidationOrBuilder.java new file mode 100644 index 000000000000..6afc3c1b17c9 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegexValidationOrBuilder.java @@ -0,0 +1,52 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface RegexValidationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.RegexValidation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + java.util.List + getRegexesList(); + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + int getRegexesCount(); + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + java.lang.String getRegexes(int index); + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + com.google.protobuf.ByteString + getRegexesBytes(int index); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java new file mode 100644 index 000000000000..e3d0120c0d16 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java @@ -0,0 +1,189 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.google.cloud.dataproc.v1; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import java.util.Map; +import java.util.ArrayList; +import java.util.List; + +// AUTO-GENERATED DOCUMENTATION AND CLASS +@javax.annotation.Generated("by GAPIC protoc plugin") +public class RegionName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding("projects/{project}/regions/{region}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String region; + + public String getProject() { + return project; + } + + public String getRegion() { + return region; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private RegionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + region = Preconditions.checkNotNull(builder.getRegion()); + } + + public static RegionName of(String project, String region) { + return newBuilder() + .setProject(project) + .setRegion(region) + .build(); + } + + public static String format(String project, String region) { + return newBuilder() + .setProject(project) + .setRegion(region) + .build() + .toString(); + } + + public static RegionName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PATH_TEMPLATE.validatedMatch(formattedString, "RegionName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("region")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List 
values) { + List list = new ArrayList(values.size()); + for (RegionName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("region", region); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PATH_TEMPLATE.instantiate("project", project, "region", region); + } + + /** Builder for RegionName. */ + public static class Builder { + + private String project; + private String region; + + public String getProject() { + return project; + } + + public String getRegion() { + return region; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setRegion(String region) { + this.region = region; + return this; + } + + private Builder() { + } + + private Builder(RegionName regionName) { + project = regionName.project; + region = regionName.region; + } + + public RegionName build() { + return new RegionName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof RegionName) { + RegionName that = (RegionName) o; + return (this.project.equals(that.project)) + && (this.region.equals(that.region)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= region.hashCode(); + return h; + } +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfig.java index 2bb21b8ea8ed..a0bd5d93b001 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfig.java @@ -115,9 +115,11 @@ protected com.google.protobuf.MapField internalGetMapField( private volatile java.lang.Object imageVersion_; /** *
-   * Optional. The version of software inside the cluster. It must match the
-   * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-   * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+   * Optional. The version of software inside the cluster. It must be one of the supported
+   * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+   * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+   * If unspecified, it defaults to the latest version.
    * 
* * string image_version = 1; @@ -136,9 +138,11 @@ public java.lang.String getImageVersion() { } /** *
-   * Optional. The version of software inside the cluster. It must match the
-   * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-   * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+   * Optional. The version of software inside the cluster. It must be one of the supported
+   * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+   * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+   * If unspecified, it defaults to the latest version.
    * 
* * string image_version = 1; @@ -668,9 +672,11 @@ public Builder mergeFrom( private java.lang.Object imageVersion_ = ""; /** *
-     * Optional. The version of software inside the cluster. It must match the
-     * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-     * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+     * Optional. The version of software inside the cluster. It must be one of the supported
+     * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+     * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+     * If unspecified, it defaults to the latest version.
      * 
* * string image_version = 1; @@ -689,9 +695,11 @@ public java.lang.String getImageVersion() { } /** *
-     * Optional. The version of software inside the cluster. It must match the
-     * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-     * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+     * Optional. The version of software inside the cluster. It must be one of the supported
+     * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+     * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+     * If unspecified, it defaults to the latest version.
      * 
* * string image_version = 1; @@ -711,9 +719,11 @@ public java.lang.String getImageVersion() { } /** *
-     * Optional. The version of software inside the cluster. It must match the
-     * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-     * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+     * Optional. The version of software inside the cluster. It must be one of the supported
+     * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+     * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+     * If unspecified, it defaults to the latest version.
      * 
* * string image_version = 1; @@ -730,9 +740,11 @@ public Builder setImageVersion( } /** *
-     * Optional. The version of software inside the cluster. It must match the
-     * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-     * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+     * Optional. The version of software inside the cluster. It must be one of the supported
+     * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+     * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+     * If unspecified, it defaults to the latest version.
      * 
* * string image_version = 1; @@ -745,9 +757,11 @@ public Builder clearImageVersion() { } /** *
-     * Optional. The version of software inside the cluster. It must match the
-     * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-     * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+     * Optional. The version of software inside the cluster. It must be one of the supported
+     * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+     * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+     * If unspecified, it defaults to the latest version.
      * 
* * string image_version = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfigOrBuilder.java index 95c5be284919..2fb33ef6b1df 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SoftwareConfigOrBuilder.java @@ -9,9 +9,11 @@ public interface SoftwareConfigOrBuilder extends /** *
-   * Optional. The version of software inside the cluster. It must match the
-   * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-   * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+   * Optional. The version of software inside the cluster. It must be one of the supported
+   * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+   * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+   * If unspecified, it defaults to the latest version.
    * 
* * string image_version = 1; @@ -19,9 +21,11 @@ public interface SoftwareConfigOrBuilder extends java.lang.String getImageVersion(); /** *
-   * Optional. The version of software inside the cluster. It must match the
-   * regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the
-   * latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)).
+   * Optional. The version of software inside the cluster. It must be one of the supported
+   * [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * such as "1.2" (including a subminor version, such as "1.2.29"), or the
+   * ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+   * If unspecified, it defaults to the latest version.
    * 
* * string image_version = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequest.java index 05cccdda1e49..101e25208cd0 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequest.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequest.java @@ -22,6 +22,7 @@ private SubmitJobRequest(com.google.protobuf.GeneratedMessageV3.Builder build private SubmitJobRequest() { projectId_ = ""; region_ = ""; + requestId_ = ""; } @java.lang.Override @@ -73,6 +74,12 @@ private SubmitJobRequest( region_ = s; break; } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -224,6 +231,64 @@ public com.google.cloud.dataproc.v1.JobOrBuilder getJobOrBuilder() { return getJob(); } + public static final int REQUEST_ID_FIELD_NUMBER = 4; + private volatile java.lang.Object requestId_; + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } + } + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -247,6 +312,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getRegionBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, region_); } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_); + } unknownFields.writeTo(output); } @@ -266,6 +334,9 @@ public int getSerializedSize() { if (!getRegionBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, region_); } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -291,6 +362,8 @@ public boolean equals(final java.lang.Object obj) { result = result && getJob() .equals(other.getJob()); } + result = result && getRequestId() + .equals(other.getRequestId()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -310,6 +383,8 @@ public int hashCode() { hash = (37 * hash) + JOB_FIELD_NUMBER; hash = (53 * hash) + getJob().hashCode(); } + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -457,6 +532,8 @@ public Builder clear() { job_ = null; jobBuilder_ = null; } + requestId_ = ""; + return this; } @@ -490,6 +567,7 @@ public com.google.cloud.dataproc.v1.SubmitJobRequest buildPartial() { 
} else { result.job_ = jobBuilder_.build(); } + result.requestId_ = requestId_; onBuilt(); return result; } @@ -549,6 +627,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1.SubmitJobRequest other) { if (other.hasJob()) { mergeJob(other.getJob()); } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -913,6 +995,135 @@ public com.google.cloud.dataproc.v1.JobOrBuilder getJobOrBuilder() { } return jobBuilder_; } + + private java.lang.Object requestId_ = ""; + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public Builder setRequestId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + requestId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public Builder clearRequestId() { + + requestId_ = getDefaultInstance().getRequestId(); + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+     * is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 4; + */ + public Builder setRequestIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + requestId_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequestOrBuilder.java index 38280e09d3d3..e19e67752a0f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequestOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SubmitJobRequestOrBuilder.java @@ -69,4 +69,38 @@ public interface SubmitJobRequestOrBuilder extends * .google.cloud.dataproc.v1.Job job = 2; */ com.google.cloud.dataproc.v1.JobOrBuilder getJobOrBuilder(); + + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + java.lang.String getRequestId(); + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
+   * is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 4; + */ + com.google.protobuf.ByteString + getRequestIdBytes(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameter.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameter.java new file mode 100644 index 000000000000..fb89c4e07326 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameter.java @@ -0,0 +1,1708 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A configurable parameter that replaces one or more fields in the template.
+ * Parameterizable fields:
+ * - Labels
+ * - File uris
+ * - Job properties
+ * - Job arguments
+ * - Script variables
+ * - Main class (in HadoopJob and SparkJob)
+ * - Zone (in ClusterSelector)
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.TemplateParameter} + */ +public final class TemplateParameter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.TemplateParameter) + TemplateParameterOrBuilder { +private static final long serialVersionUID = 0L; + // Use TemplateParameter.newBuilder() to construct. + private TemplateParameter(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private TemplateParameter() { + name_ = ""; + fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + description_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TemplateParameter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000002; + } + fields_.add(s); + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + description_ = s; + break; + } + case 34: { + com.google.cloud.dataproc.v1.ParameterValidation.Builder subBuilder = null; + if (validation_ != null) { + subBuilder = validation_.toBuilder(); + } + validation_ = 
input.readMessage(com.google.cloud.dataproc.v1.ParameterValidation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(validation_); + validation_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = fields_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_TemplateParameter_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_TemplateParameter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.TemplateParameter.class, com.google.cloud.dataproc.v1.TemplateParameter.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Required.  Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Required.  Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FIELDS_FIELD_NUMBER = 2; + private com.google.protobuf.LazyStringList fields_; + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ProtocolStringList + getFieldsList() { + return fields_; + } + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public int getFieldsCount() { + return fields_.size(); + } + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public java.lang.String getFields(int index) { + return fields_.get(index); + } + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ByteString + getFieldsBytes(int index) { + return fields_.getByteString(index); + } + + public static final int DESCRIPTION_FIELD_NUMBER = 3; + private volatile java.lang.Object description_; + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VALIDATION_FIELD_NUMBER = 4; + private com.google.cloud.dataproc.v1.ParameterValidation validation_; + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public boolean hasValidation() { + return validation_ != null; + } + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1.ParameterValidation getValidation() { + return validation_ == null ? com.google.cloud.dataproc.v1.ParameterValidation.getDefaultInstance() : validation_; + } + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1.ParameterValidationOrBuilder getValidationOrBuilder() { + return getValidation(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + for (int i = 0; i < fields_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, fields_.getRaw(i)); + } + if (!getDescriptionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, description_); + } + if (validation_ != null) { + output.writeMessage(4, getValidation()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + { + int dataSize = 0; + for (int i = 0; i < fields_.size(); i++) { + dataSize += computeStringSizeNoTag(fields_.getRaw(i)); + } + size += dataSize; + size += 1 * getFieldsList().size(); + } + if (!getDescriptionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, description_); + } + if (validation_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getValidation()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj 
instanceof com.google.cloud.dataproc.v1.TemplateParameter)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.TemplateParameter other = (com.google.cloud.dataproc.v1.TemplateParameter) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getFieldsList() + .equals(other.getFieldsList()); + result = result && getDescription() + .equals(other.getDescription()); + result = result && (hasValidation() == other.hasValidation()); + if (hasValidation()) { + result = result && getValidation() + .equals(other.getValidation()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (getFieldsCount() > 0) { + hash = (37 * hash) + FIELDS_FIELD_NUMBER; + hash = (53 * hash) + getFieldsList().hashCode(); + } + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + if (hasValidation()) { + hash = (37 * hash) + VALIDATION_FIELD_NUMBER; + hash = (53 * hash) + getValidation().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + com.google.protobuf.ByteString 
data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.TemplateParameter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.TemplateParameter prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A configurable parameter that replaces one or more fields in the template.
+   * Parameterizable fields:
+   * - Labels
+   * - File uris
+   * - Job properties
+   * - Job arguments
+   * - Script variables
+   * - Main class (in HadoopJob and SparkJob)
+   * - Zone (in ClusterSelector)
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.TemplateParameter} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.TemplateParameter) + com.google.cloud.dataproc.v1.TemplateParameterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_TemplateParameter_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_TemplateParameter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.TemplateParameter.class, com.google.cloud.dataproc.v1.TemplateParameter.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.TemplateParameter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + description_ = ""; + + if (validationBuilder_ == null) { + validation_ = null; + } else { + validation_ = null; + validationBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_TemplateParameter_descriptor; + } + + 
@java.lang.Override + public com.google.cloud.dataproc.v1.TemplateParameter getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.TemplateParameter.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.TemplateParameter build() { + com.google.cloud.dataproc.v1.TemplateParameter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.TemplateParameter buildPartial() { + com.google.cloud.dataproc.v1.TemplateParameter result = new com.google.cloud.dataproc.v1.TemplateParameter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = fields_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.fields_ = fields_; + result.description_ = description_; + if (validationBuilder_ == null) { + result.validation_ = validation_; + } else { + result.validation_ = validationBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + 
@java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.TemplateParameter) { + return mergeFrom((com.google.cloud.dataproc.v1.TemplateParameter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.TemplateParameter other) { + if (other == com.google.cloud.dataproc.v1.TemplateParameter.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.fields_.isEmpty()) { + if (fields_.isEmpty()) { + fields_ = other.fields_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFieldsIsMutable(); + fields_.addAll(other.fields_); + } + onChanged(); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + onChanged(); + } + if (other.hasValidation()) { + mergeValidation(other.getValidation()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.TemplateParameter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.TemplateParameter) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private 
java.lang.Object name_ = ""; + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureFieldsIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = new com.google.protobuf.LazyStringArrayList(fields_); + bitField0_ |= 0x00000002; + } + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ProtocolStringList + getFieldsList() { + return fields_.getUnmodifiableView(); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public int getFieldsCount() { + return fields_.size(); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public java.lang.String getFields(int index) { + return fields_.get(index); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ByteString + getFieldsBytes(int index) { + return fields_.getByteString(index); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder setFields( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFieldsIsMutable(); + fields_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder addFields( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFieldsIsMutable(); + fields_.add(value); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder addAllFields( + java.lang.Iterable values) { + ensureFieldsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, fields_); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder clearFields() { + fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field
+     * paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder addFieldsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureFieldsIsMutable(); + fields_.add(value); + onChanged(); + return this; + } + + private java.lang.Object description_ = ""; + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + description_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public Builder clearDescription() { + + description_ = getDefaultInstance().getDescription(); + onChanged(); + return this; + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + description_ = value; + onChanged(); + return this; + } + + private com.google.cloud.dataproc.v1.ParameterValidation validation_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ParameterValidation, com.google.cloud.dataproc.v1.ParameterValidation.Builder, com.google.cloud.dataproc.v1.ParameterValidationOrBuilder> validationBuilder_; + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public boolean hasValidation() { + return validationBuilder_ != null || validation_ != null; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1.ParameterValidation getValidation() { + if (validationBuilder_ == null) { + return validation_ == null ? com.google.cloud.dataproc.v1.ParameterValidation.getDefaultInstance() : validation_; + } else { + return validationBuilder_.getMessage(); + } + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public Builder setValidation(com.google.cloud.dataproc.v1.ParameterValidation value) { + if (validationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + validation_ = value; + onChanged(); + } else { + validationBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public Builder setValidation( + com.google.cloud.dataproc.v1.ParameterValidation.Builder builderForValue) { + if (validationBuilder_ == null) { + validation_ = builderForValue.build(); + onChanged(); + } else { + validationBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public Builder mergeValidation(com.google.cloud.dataproc.v1.ParameterValidation value) { + if (validationBuilder_ == null) { + if (validation_ != null) { + validation_ = + com.google.cloud.dataproc.v1.ParameterValidation.newBuilder(validation_).mergeFrom(value).buildPartial(); + } else { + validation_ = value; + } + onChanged(); + } else { + validationBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public Builder clearValidation() { + if (validationBuilder_ == null) { + validation_ = null; + onChanged(); + } else { + validation_ = null; + validationBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1.ParameterValidation.Builder getValidationBuilder() { + + onChanged(); + return getValidationFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1.ParameterValidationOrBuilder getValidationOrBuilder() { + if (validationBuilder_ != null) { + return validationBuilder_.getMessageOrBuilder(); + } else { + return validation_ == null ? + com.google.cloud.dataproc.v1.ParameterValidation.getDefaultInstance() : validation_; + } + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ParameterValidation, com.google.cloud.dataproc.v1.ParameterValidation.Builder, com.google.cloud.dataproc.v1.ParameterValidationOrBuilder> + getValidationFieldBuilder() { + if (validationBuilder_ == null) { + validationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ParameterValidation, com.google.cloud.dataproc.v1.ParameterValidation.Builder, com.google.cloud.dataproc.v1.ParameterValidationOrBuilder>( + getValidation(), + getParentForChildren(), + isClean()); + validation_ = null; + } + return validationBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.TemplateParameter) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.TemplateParameter) + private static final com.google.cloud.dataproc.v1.TemplateParameter DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.TemplateParameter(); + } + + public static com.google.cloud.dataproc.v1.TemplateParameter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TemplateParameter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TemplateParameter(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.TemplateParameter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameterOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameterOrBuilder.java new file mode 100644 index 000000000000..3d59badea987 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/TemplateParameterOrBuilder.java @@ -0,0 +1,259 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface TemplateParameterOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.TemplateParameter) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required.  Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Required.  Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + java.util.List + getFieldsList(); + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + int getFieldsCount(); + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + java.lang.String getFields(int index); + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field
+   * paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + com.google.protobuf.ByteString + getFieldsBytes(int index); + + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + java.lang.String getDescription(); + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + boolean hasValidation(); + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + com.google.cloud.dataproc.v1.ParameterValidation getValidation(); + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1.ParameterValidation validation = 4; + */ + com.google.cloud.dataproc.v1.ParameterValidationOrBuilder getValidationOrBuilder(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequest.java index 17583b5e971e..431b7a1562ad 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequest.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequest.java @@ -23,6 +23,7 @@ private UpdateClusterRequest() { projectId_ = ""; region_ = ""; clusterName_ = ""; + requestId_ = ""; } @java.lang.Override @@ -93,6 +94,25 @@ private UpdateClusterRequest( region_ = s; break; } + case 50: { + com.google.protobuf.Duration.Builder subBuilder = null; + if (gracefulDecommissionTimeout_ != null) { + subBuilder = gracefulDecommissionTimeout_.toBuilder(); + } + gracefulDecommissionTimeout_ = input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(gracefulDecommissionTimeout_); + gracefulDecommissionTimeout_ = subBuilder.buildPartial(); + } + + break; + } + case 58: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -286,6 +306,57 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClusterOrBuilder() { return getCluster(); } + public static final int GRACEFUL_DECOMMISSION_TIMEOUT_FIELD_NUMBER = 6; + private com.google.protobuf.Duration gracefulDecommissionTimeout_; + /** + *
+   * Optional. Timeout for graceful YARN decommissioning. Graceful
+   * decommissioning allows removing nodes from the cluster without
+   * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+   * in progress to finish before forcefully removing nodes (and potentially
+   * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+   * the maximum allowed timeout is 1 day.
+   * Only supported on Dataproc image versions 1.2 and higher.
+   * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public boolean hasGracefulDecommissionTimeout() { + return gracefulDecommissionTimeout_ != null; + } + /** + *
+   * Optional. Timeout for graceful YARN decommissioning. Graceful
+   * decommissioning allows removing nodes from the cluster without
+   * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+   * in progress to finish before forcefully removing nodes (and potentially
+   * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+   * the maximum allowed timeout is 1 day.
+   * Only supported on Dataproc image versions 1.2 and higher.
+   * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public com.google.protobuf.Duration getGracefulDecommissionTimeout() { + return gracefulDecommissionTimeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : gracefulDecommissionTimeout_; + } + /** + *
+   * Optional. Timeout for graceful YARN decommissioning. Graceful
+   * decommissioning allows removing nodes from the cluster without
+   * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+   * in progress to finish before forcefully removing nodes (and potentially
+   * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+   * the maximum allowed timeout is 1 day.
+   * Only supported on Dataproc image versions 1.2 and higher.
+   * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public com.google.protobuf.DurationOrBuilder getGracefulDecommissionTimeoutOrBuilder() { + return getGracefulDecommissionTimeout(); + } + public static final int UPDATE_MASK_FIELD_NUMBER = 4; private com.google.protobuf.FieldMask updateMask_; /** @@ -448,6 +519,64 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return getUpdateMask(); } + public static final int REQUEST_ID_FIELD_NUMBER = 7; + private volatile java.lang.Object requestId_; + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 7; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } + } + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 7; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -477,6 +606,12 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getRegionBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, region_); } + if (gracefulDecommissionTimeout_ != null) { + output.writeMessage(6, getGracefulDecommissionTimeout()); + } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, requestId_); + } unknownFields.writeTo(output); } @@ -503,6 +638,13 @@ public int getSerializedSize() { if (!getRegionBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, region_); } + if (gracefulDecommissionTimeout_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, getGracefulDecommissionTimeout()); + } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, requestId_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -530,11 +672,18 @@ public boolean equals(final java.lang.Object obj) { result = result && getCluster() .equals(other.getCluster()); } + result = result && (hasGracefulDecommissionTimeout() == other.hasGracefulDecommissionTimeout()); + if (hasGracefulDecommissionTimeout()) { + result = result && getGracefulDecommissionTimeout() + .equals(other.getGracefulDecommissionTimeout()); + } result = result && (hasUpdateMask() == other.hasUpdateMask()); if (hasUpdateMask()) { result = result && getUpdateMask() 
.equals(other.getUpdateMask()); } + result = result && getRequestId() + .equals(other.getRequestId()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -556,10 +705,16 @@ public int hashCode() { hash = (37 * hash) + CLUSTER_FIELD_NUMBER; hash = (53 * hash) + getCluster().hashCode(); } + if (hasGracefulDecommissionTimeout()) { + hash = (37 * hash) + GRACEFUL_DECOMMISSION_TIMEOUT_FIELD_NUMBER; + hash = (53 * hash) + getGracefulDecommissionTimeout().hashCode(); + } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -709,12 +864,20 @@ public Builder clear() { cluster_ = null; clusterBuilder_ = null; } + if (gracefulDecommissionTimeoutBuilder_ == null) { + gracefulDecommissionTimeout_ = null; + } else { + gracefulDecommissionTimeout_ = null; + gracefulDecommissionTimeoutBuilder_ = null; + } if (updateMaskBuilder_ == null) { updateMask_ = null; } else { updateMask_ = null; updateMaskBuilder_ = null; } + requestId_ = ""; + return this; } @@ -749,11 +912,17 @@ public com.google.cloud.dataproc.v1.UpdateClusterRequest buildPartial() { } else { result.cluster_ = clusterBuilder_.build(); } + if (gracefulDecommissionTimeoutBuilder_ == null) { + result.gracefulDecommissionTimeout_ = gracefulDecommissionTimeout_; + } else { + result.gracefulDecommissionTimeout_ = gracefulDecommissionTimeoutBuilder_.build(); + } if (updateMaskBuilder_ == null) { result.updateMask_ = updateMask_; } else { result.updateMask_ = updateMaskBuilder_.build(); } + result.requestId_ = requestId_; onBuilt(); return result; } @@ -817,9 +986,16 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1.UpdateClusterRequest other if (other.hasCluster()) { mergeCluster(other.getCluster()); } + if 
(other.hasGracefulDecommissionTimeout()) { + mergeGracefulDecommissionTimeout(other.getGracefulDecommissionTimeout()); + } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1274,6 +1450,213 @@ public com.google.cloud.dataproc.v1.ClusterOrBuilder getClusterOrBuilder() { return clusterBuilder_; } + private com.google.protobuf.Duration gracefulDecommissionTimeout_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> gracefulDecommissionTimeoutBuilder_; + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public boolean hasGracefulDecommissionTimeout() { + return gracefulDecommissionTimeoutBuilder_ != null || gracefulDecommissionTimeout_ != null; + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public com.google.protobuf.Duration getGracefulDecommissionTimeout() { + if (gracefulDecommissionTimeoutBuilder_ == null) { + return gracefulDecommissionTimeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : gracefulDecommissionTimeout_; + } else { + return gracefulDecommissionTimeoutBuilder_.getMessage(); + } + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public Builder setGracefulDecommissionTimeout(com.google.protobuf.Duration value) { + if (gracefulDecommissionTimeoutBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + gracefulDecommissionTimeout_ = value; + onChanged(); + } else { + gracefulDecommissionTimeoutBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public Builder setGracefulDecommissionTimeout( + com.google.protobuf.Duration.Builder builderForValue) { + if (gracefulDecommissionTimeoutBuilder_ == null) { + gracefulDecommissionTimeout_ = builderForValue.build(); + onChanged(); + } else { + gracefulDecommissionTimeoutBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public Builder mergeGracefulDecommissionTimeout(com.google.protobuf.Duration value) { + if (gracefulDecommissionTimeoutBuilder_ == null) { + if (gracefulDecommissionTimeout_ != null) { + gracefulDecommissionTimeout_ = + com.google.protobuf.Duration.newBuilder(gracefulDecommissionTimeout_).mergeFrom(value).buildPartial(); + } else { + gracefulDecommissionTimeout_ = value; + } + onChanged(); + } else { + gracefulDecommissionTimeoutBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public Builder clearGracefulDecommissionTimeout() { + if (gracefulDecommissionTimeoutBuilder_ == null) { + gracefulDecommissionTimeout_ = null; + onChanged(); + } else { + gracefulDecommissionTimeout_ = null; + gracefulDecommissionTimeoutBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public com.google.protobuf.Duration.Builder getGracefulDecommissionTimeoutBuilder() { + + onChanged(); + return getGracefulDecommissionTimeoutFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + public com.google.protobuf.DurationOrBuilder getGracefulDecommissionTimeoutOrBuilder() { + if (gracefulDecommissionTimeoutBuilder_ != null) { + return gracefulDecommissionTimeoutBuilder_.getMessageOrBuilder(); + } else { + return gracefulDecommissionTimeout_ == null ? + com.google.protobuf.Duration.getDefaultInstance() : gracefulDecommissionTimeout_; + } + } + /** + *
+     * Optional. Timeout for graceful YARN decommissioning. Graceful
+     * decommissioning allows removing nodes from the cluster without
+     * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+     * in progress to finish before forcefully removing nodes (and potentially
+     * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+     * the maximum allowed timeout is 1 day.
+     * Only supported on Dataproc image versions 1.2 and higher.
+     * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> + getGracefulDecommissionTimeoutFieldBuilder() { + if (gracefulDecommissionTimeoutBuilder_ == null) { + gracefulDecommissionTimeoutBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( + getGracefulDecommissionTimeout(), + getParentForChildren(), + isClean()); + gracefulDecommissionTimeout_ = null; + } + return gracefulDecommissionTimeoutBuilder_; + } + private com.google.protobuf.FieldMask updateMask_ = null; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; @@ -1813,6 +2196,135 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { } return updateMaskBuilder_; } + + private java.lang.Object requestId_ = ""; + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 7; + */ + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + requestId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 7; + */ + public com.google.protobuf.ByteString + getRequestIdBytes() { + java.lang.Object ref = requestId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + requestId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 7; + */ + public Builder setRequestId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + requestId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 7; + */ + public Builder clearRequestId() { + + requestId_ = getDefaultInstance().getRequestId(); + onChanged(); + return this; + } + /** + *
+     * Optional. A unique id used to identify the request. If the server
+     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+     * id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+     * backend is returned.
+     * It is recommended to always set this value to a
+     * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). The maximum length is 40 characters.
+     * 
+ * + * string request_id = 7; + */ + public Builder setRequestIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + requestId_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequestOrBuilder.java index 02eae633b09d..30146a038955 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequestOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateClusterRequestOrBuilder.java @@ -88,6 +88,49 @@ public interface UpdateClusterRequestOrBuilder extends */ com.google.cloud.dataproc.v1.ClusterOrBuilder getClusterOrBuilder(); + /** + *
+   * Optional. Timeout for graceful YARN decommissioning. Graceful
+   * decommissioning allows removing nodes from the cluster without
+   * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+   * in progress to finish before forcefully removing nodes (and potentially
+   * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+   * the maximum allowed timeout is 1 day.
+   * Only supported on Dataproc image versions 1.2 and higher.
+   * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + boolean hasGracefulDecommissionTimeout(); + /** + *
+   * Optional. Timeout for graceful YARN decommissioning. Graceful
+   * decommissioning allows removing nodes from the cluster without
+   * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+   * in progress to finish before forcefully removing nodes (and potentially
+   * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+   * the maximum allowed timeout is 1 day.
+   * Only supported on Dataproc image versions 1.2 and higher.
+   * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + com.google.protobuf.Duration getGracefulDecommissionTimeout(); + /** + *
+   * Optional. Timeout for graceful YARN decommissioning. Graceful
+   * decommissioning allows removing nodes from the cluster without
+   * interrupting jobs in progress. Timeout specifies how long to wait for jobs
+   * in progress to finish before forcefully removing nodes (and potentially
+   * interrupting jobs). Default timeout is 0 (for forceful decommission), and
+   * the maximum allowed timeout is 1 day.
+   * Only supported on Dataproc image versions 1.2 and higher.
+   * 
+ * + * .google.protobuf.Duration graceful_decommission_timeout = 6; + */ + com.google.protobuf.DurationOrBuilder getGracefulDecommissionTimeoutOrBuilder(); + /** *
    * Required. Specifies the path, relative to `Cluster`, of
@@ -241,4 +284,38 @@ public interface UpdateClusterRequestOrBuilder extends
    * .google.protobuf.FieldMask update_mask = 4;
    */
   com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder();
+
+  /**
+   * 
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 7; + */ + java.lang.String getRequestId(); + /** + *
+   * Optional. A unique id used to identify the request. If the server
+   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+   * id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+   * backend is returned.
+   * It is recommended to always set this value to a
+   * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). The maximum length is 40 characters.
+   * 
+ * + * string request_id = 7; + */ + com.google.protobuf.ByteString + getRequestIdBytes(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequest.java new file mode 100644 index 000000000000..99aa46a237ad --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequest.java @@ -0,0 +1,663 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A request to update a workflow template.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest} + */ +public final class UpdateWorkflowTemplateRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) + UpdateWorkflowTemplateRequestOrBuilder { +private static final long serialVersionUID = 0L; + // Use UpdateWorkflowTemplateRequest.newBuilder() to construct. + private UpdateWorkflowTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private UpdateWorkflowTemplateRequest() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UpdateWorkflowTemplateRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder subBuilder = null; + if (template_ != null) { + subBuilder = template_.toBuilder(); + } + template_ = input.readMessage(com.google.cloud.dataproc.v1.WorkflowTemplate.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(template_); + template_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.Builder.class); + } + + public static final int TEMPLATE_FIELD_NUMBER = 1; + private com.google.cloud.dataproc.v1.WorkflowTemplate template_; + /** + *
+   * Required. The updated workflow template.
+   * The `template.version` field must match the current version.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public boolean hasTemplate() { + return template_ != null; + } + /** + *
+   * Required. The updated workflow template.
+   * The `template.version` field must match the current version.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate() { + return template_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } + /** + *
+   * Required. The updated workflow template.
+   * The `template.version` field must match the current version.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder() { + return getTemplate(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (template_ != null) { + output.writeMessage(1, getTemplate()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (template_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getTemplate()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest other = (com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) obj; + + boolean result = true; + result = result && (hasTemplate() == other.hasTemplate()); + if (hasTemplate()) { + result = result && getTemplate() + .equals(other.getTemplate()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasTemplate()) { + hash = (37 * hash) + TEMPLATE_FIELD_NUMBER; + hash = (53 * hash) + getTemplate().hashCode(); + } + hash = (29 * 
hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A request to update a workflow template.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (templateBuilder_ == null) { + template_ = null; + } else { + template_ = null; + templateBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_descriptor; + } + + @java.lang.Override + public 
com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest build() { + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest buildPartial() { + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest result = new com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest(this); + if (templateBuilder_ == null) { + result.template_ = template_; + } else { + result.template_ = templateBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if 
(other instanceof com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) { + return mergeFrom((com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest other) { + if (other == com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest.getDefaultInstance()) return this; + if (other.hasTemplate()) { + mergeTemplate(other.getTemplate()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private com.google.cloud.dataproc.v1.WorkflowTemplate template_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> templateBuilder_; + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public boolean hasTemplate() { + return templateBuilder_ != null || template_ != null; + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate() { + if (templateBuilder_ == null) { + return template_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } else { + return templateBuilder_.getMessage(); + } + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public Builder setTemplate(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + template_ = value; + onChanged(); + } else { + templateBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public Builder setTemplate( + com.google.cloud.dataproc.v1.WorkflowTemplate.Builder builderForValue) { + if (templateBuilder_ == null) { + template_ = builderForValue.build(); + onChanged(); + } else { + templateBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public Builder mergeTemplate(com.google.cloud.dataproc.v1.WorkflowTemplate value) { + if (templateBuilder_ == null) { + if (template_ != null) { + template_ = + com.google.cloud.dataproc.v1.WorkflowTemplate.newBuilder(template_).mergeFrom(value).buildPartial(); + } else { + template_ = value; + } + onChanged(); + } else { + templateBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public Builder clearTemplate() { + if (templateBuilder_ == null) { + template_ = null; + onChanged(); + } else { + template_ = null; + templateBuilder_ = null; + } + + return this; + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplate.Builder getTemplateBuilder() { + + onChanged(); + return getTemplateFieldBuilder().getBuilder(); + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder() { + if (templateBuilder_ != null) { + return templateBuilder_.getMessageOrBuilder(); + } else { + return template_ == null ? + com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance() : template_; + } + } + /** + *
+     * Required. The updated workflow template.
+     * The `template.version` field must match the current version.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder> + getTemplateFieldBuilder() { + if (templateBuilder_ == null) { + templateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplate, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder, com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder>( + getTemplate(), + getParentForChildren(), + isClean()); + template_ = null; + } + return templateBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) + private static final com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest(); + } + + public static com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public UpdateWorkflowTemplateRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UpdateWorkflowTemplateRequest(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequestOrBuilder.java new file mode 100644 index 000000000000..f7f9e9a01abc --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/UpdateWorkflowTemplateRequestOrBuilder.java @@ -0,0 +1,37 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface UpdateWorkflowTemplateRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The updated workflow template.
+   * The `template.version` field must match the current version.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + boolean hasTemplate(); + /** + *
+   * Required. The updated workflow template.
+   * The `template.version` field must match the current version.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + com.google.cloud.dataproc.v1.WorkflowTemplate getTemplate(); + /** + *
+   * Required. The updated workflow template.
+   * The `template.version` field must match the current version.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplate template = 1; + */ + com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder getTemplateOrBuilder(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidation.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidation.java new file mode 100644 index 000000000000..670df9f16adf --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidation.java @@ -0,0 +1,646 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * Validation based on a list of allowed values.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ValueValidation} + */ +public final class ValueValidation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ValueValidation) + ValueValidationOrBuilder { +private static final long serialVersionUID = 0L; + // Use ValueValidation.newBuilder() to construct. + private ValueValidation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ValueValidation() { + values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ValueValidation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + values_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + values_.add(s); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + 
values_ = values_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ValueValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ValueValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ValueValidation.class, com.google.cloud.dataproc.v1.ValueValidation.Builder.class); + } + + public static final int VALUES_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList values_; + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ProtocolStringList + getValuesList() { + return values_; + } + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public int getValuesCount() { + return values_.size(); + } + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public java.lang.String getValues(int index) { + return values_.get(index); + } + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ByteString + getValuesBytes(int index) { + return values_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < values_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, values_.getRaw(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < values_.size(); i++) { + dataSize += computeStringSizeNoTag(values_.getRaw(i)); + } + size += dataSize; + size += 1 * getValuesList().size(); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.ValueValidation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.ValueValidation other = (com.google.cloud.dataproc.v1.ValueValidation) obj; + + boolean result = true; + result = result && getValuesList() + .equals(other.getValuesList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getValuesCount() > 0) { + hash = (37 * hash) + VALUES_FIELD_NUMBER; + hash = (53 * hash) + getValuesList().hashCode(); + } + hash = (29 * hash) + 
unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.ValueValidation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.ValueValidation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.ValueValidation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ValueValidation) + com.google.cloud.dataproc.v1.ValueValidationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ValueValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ValueValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.ValueValidation.class, com.google.cloud.dataproc.v1.ValueValidation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.ValueValidation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_ValueValidation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ValueValidation getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance(); 
+ } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ValueValidation build() { + com.google.cloud.dataproc.v1.ValueValidation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ValueValidation buildPartial() { + com.google.cloud.dataproc.v1.ValueValidation result = new com.google.cloud.dataproc.v1.ValueValidation(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + values_ = values_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.values_ = values_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.ValueValidation) { + return mergeFrom((com.google.cloud.dataproc.v1.ValueValidation)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.ValueValidation other) { + if (other == com.google.cloud.dataproc.v1.ValueValidation.getDefaultInstance()) return this; + if (!other.values_.isEmpty()) { + if (values_.isEmpty()) { + values_ = other.values_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureValuesIsMutable(); + values_.addAll(other.values_); + } + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.ValueValidation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.ValueValidation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringList values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureValuesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + values_ = new com.google.protobuf.LazyStringArrayList(values_); + bitField0_ |= 0x00000001; + } + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ProtocolStringList + getValuesList() { + return values_.getUnmodifiableView(); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public int getValuesCount() { + return values_.size(); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public java.lang.String getValues(int index) { + return values_.get(index); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ByteString + getValuesBytes(int index) { + return values_.getByteString(index); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder setValues( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureValuesIsMutable(); + values_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder addValues( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureValuesIsMutable(); + values_.add(value); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder addAllValues( + java.lang.Iterable values) { + ensureValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, values_); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder clearValues() { + values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder addValuesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureValuesIsMutable(); + values_.add(value); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ValueValidation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ValueValidation) + private static final com.google.cloud.dataproc.v1.ValueValidation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ValueValidation(); + } + + public static com.google.cloud.dataproc.v1.ValueValidation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ValueValidation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ValueValidation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.ValueValidation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidationOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidationOrBuilder.java new file mode 100644 index 000000000000..16daa5ab6c80 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ValueValidationOrBuilder.java @@ -0,0 +1,44 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface ValueValidationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.ValueValidation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + java.util.List + getValuesList(); + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + int getValuesCount(); + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + java.lang.String getValues(int index); + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + com.google.protobuf.ByteString + getValuesBytes(int index); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraph.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraph.java new file mode 100644 index 000000000000..ec01eedd864b --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraph.java @@ -0,0 +1,859 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * The workflow graph.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowGraph} + */ +public final class WorkflowGraph extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.WorkflowGraph) + WorkflowGraphOrBuilder { +private static final long serialVersionUID = 0L; + // Use WorkflowGraph.newBuilder() to construct. + private WorkflowGraph(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private WorkflowGraph() { + nodes_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WorkflowGraph( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + nodes_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + nodes_.add( + input.readMessage(com.google.cloud.dataproc.v1.WorkflowNode.parser(), extensionRegistry)); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + nodes_ = 
java.util.Collections.unmodifiableList(nodes_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowGraph_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowGraph_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowGraph.class, com.google.cloud.dataproc.v1.WorkflowGraph.Builder.class); + } + + public static final int NODES_FIELD_NUMBER = 1; + private java.util.List nodes_; + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public java.util.List getNodesList() { + return nodes_; + } + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public java.util.List + getNodesOrBuilderList() { + return nodes_; + } + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public int getNodesCount() { + return nodes_.size(); + } + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNode getNodes(int index) { + return nodes_.get(index); + } + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder getNodesOrBuilder( + int index) { + return nodes_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < nodes_.size(); i++) { + output.writeMessage(1, nodes_.get(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < nodes_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, nodes_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.WorkflowGraph)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.WorkflowGraph other = (com.google.cloud.dataproc.v1.WorkflowGraph) obj; + + boolean result = true; + result = result && getNodesList() + .equals(other.getNodesList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getNodesCount() > 0) { + hash = (37 * hash) + NODES_FIELD_NUMBER; + hash = (53 * hash) + getNodesList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + 
public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.WorkflowGraph parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowGraph parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.WorkflowGraph prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * The workflow graph.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowGraph} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.WorkflowGraph) + com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowGraph_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowGraph_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowGraph.class, com.google.cloud.dataproc.v1.WorkflowGraph.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.WorkflowGraph.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getNodesFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (nodesBuilder_ == null) { + nodes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + nodesBuilder_.clear(); + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowGraph_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowGraph getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1.WorkflowGraph.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowGraph build() { + com.google.cloud.dataproc.v1.WorkflowGraph result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowGraph buildPartial() { + com.google.cloud.dataproc.v1.WorkflowGraph result = new com.google.cloud.dataproc.v1.WorkflowGraph(this); + int from_bitField0_ = bitField0_; + if (nodesBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + nodes_ = java.util.Collections.unmodifiableList(nodes_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.nodes_ = nodes_; + } else { + result.nodes_ = nodesBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
com.google.cloud.dataproc.v1.WorkflowGraph) { + return mergeFrom((com.google.cloud.dataproc.v1.WorkflowGraph)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.WorkflowGraph other) { + if (other == com.google.cloud.dataproc.v1.WorkflowGraph.getDefaultInstance()) return this; + if (nodesBuilder_ == null) { + if (!other.nodes_.isEmpty()) { + if (nodes_.isEmpty()) { + nodes_ = other.nodes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureNodesIsMutable(); + nodes_.addAll(other.nodes_); + } + onChanged(); + } + } else { + if (!other.nodes_.isEmpty()) { + if (nodesBuilder_.isEmpty()) { + nodesBuilder_.dispose(); + nodesBuilder_ = null; + nodes_ = other.nodes_; + bitField0_ = (bitField0_ & ~0x00000001); + nodesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getNodesFieldBuilder() : null; + } else { + nodesBuilder_.addAllMessages(other.nodes_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.WorkflowGraph parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.WorkflowGraph) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List nodes_ = + java.util.Collections.emptyList(); + private void ensureNodesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + nodes_ = new 
java.util.ArrayList(nodes_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowNode, com.google.cloud.dataproc.v1.WorkflowNode.Builder, com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder> nodesBuilder_; + + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public java.util.List getNodesList() { + if (nodesBuilder_ == null) { + return java.util.Collections.unmodifiableList(nodes_); + } else { + return nodesBuilder_.getMessageList(); + } + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public int getNodesCount() { + if (nodesBuilder_ == null) { + return nodes_.size(); + } else { + return nodesBuilder_.getCount(); + } + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNode getNodes(int index) { + if (nodesBuilder_ == null) { + return nodes_.get(index); + } else { + return nodesBuilder_.getMessage(index); + } + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder setNodes( + int index, com.google.cloud.dataproc.v1.WorkflowNode value) { + if (nodesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodesIsMutable(); + nodes_.set(index, value); + onChanged(); + } else { + nodesBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder setNodes( + int index, com.google.cloud.dataproc.v1.WorkflowNode.Builder builderForValue) { + if (nodesBuilder_ == null) { + ensureNodesIsMutable(); + nodes_.set(index, builderForValue.build()); + onChanged(); + } else { + nodesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder addNodes(com.google.cloud.dataproc.v1.WorkflowNode value) { + if (nodesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodesIsMutable(); + nodes_.add(value); + onChanged(); + } else { + nodesBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder addNodes( + int index, com.google.cloud.dataproc.v1.WorkflowNode value) { + if (nodesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodesIsMutable(); + nodes_.add(index, value); + onChanged(); + } else { + nodesBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder addNodes( + com.google.cloud.dataproc.v1.WorkflowNode.Builder builderForValue) { + if (nodesBuilder_ == null) { + ensureNodesIsMutable(); + nodes_.add(builderForValue.build()); + onChanged(); + } else { + nodesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder addNodes( + int index, com.google.cloud.dataproc.v1.WorkflowNode.Builder builderForValue) { + if (nodesBuilder_ == null) { + ensureNodesIsMutable(); + nodes_.add(index, builderForValue.build()); + onChanged(); + } else { + nodesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder addAllNodes( + java.lang.Iterable values) { + if (nodesBuilder_ == null) { + ensureNodesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, nodes_); + onChanged(); + } else { + nodesBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder clearNodes() { + if (nodesBuilder_ == null) { + nodes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + nodesBuilder_.clear(); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public Builder removeNodes(int index) { + if (nodesBuilder_ == null) { + ensureNodesIsMutable(); + nodes_.remove(index); + onChanged(); + } else { + nodesBuilder_.remove(index); + } + return this; + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNode.Builder getNodesBuilder( + int index) { + return getNodesFieldBuilder().getBuilder(index); + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder getNodesOrBuilder( + int index) { + if (nodesBuilder_ == null) { + return nodes_.get(index); } else { + return nodesBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public java.util.List + getNodesOrBuilderList() { + if (nodesBuilder_ != null) { + return nodesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(nodes_); + } + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNode.Builder addNodesBuilder() { + return getNodesFieldBuilder().addBuilder( + com.google.cloud.dataproc.v1.WorkflowNode.getDefaultInstance()); + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public com.google.cloud.dataproc.v1.WorkflowNode.Builder addNodesBuilder( + int index) { + return getNodesFieldBuilder().addBuilder( + index, com.google.cloud.dataproc.v1.WorkflowNode.getDefaultInstance()); + } + /** + *
+     * Output only. The workflow nodes.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + public java.util.List + getNodesBuilderList() { + return getNodesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowNode, com.google.cloud.dataproc.v1.WorkflowNode.Builder, com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder> + getNodesFieldBuilder() { + if (nodesBuilder_ == null) { + nodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowNode, com.google.cloud.dataproc.v1.WorkflowNode.Builder, com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder>( + nodes_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + nodes_ = null; + } + return nodesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.WorkflowGraph) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowGraph) + private static final com.google.cloud.dataproc.v1.WorkflowGraph DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.WorkflowGraph(); + } + + public static com.google.cloud.dataproc.v1.WorkflowGraph getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public WorkflowGraph parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WorkflowGraph(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowGraph getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraphOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraphOrBuilder.java new file mode 100644 index 000000000000..9ba974c949b9 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowGraphOrBuilder.java @@ -0,0 +1,53 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface WorkflowGraphOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.WorkflowGraph) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + java.util.List + getNodesList(); + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + com.google.cloud.dataproc.v1.WorkflowNode getNodes(int index); + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + int getNodesCount(); + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + java.util.List + getNodesOrBuilderList(); + /** + *
+   * Output only. The workflow nodes.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.WorkflowNode nodes = 1; + */ + com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder getNodesOrBuilder( + int index); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadata.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadata.java new file mode 100644 index 000000000000..3a2af1e75c0d --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadata.java @@ -0,0 +1,2697 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A Cloud Dataproc workflow template resource.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowMetadata} + */ +public final class WorkflowMetadata extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.WorkflowMetadata) + WorkflowMetadataOrBuilder { +private static final long serialVersionUID = 0L; + // Use WorkflowMetadata.newBuilder() to construct. + private WorkflowMetadata(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private WorkflowMetadata() { + template_ = ""; + version_ = 0; + state_ = 0; + clusterName_ = ""; + clusterUuid_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WorkflowMetadata( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + template_ = s; + break; + } + case 16: { + + version_ = input.readInt32(); + break; + } + case 26: { + com.google.cloud.dataproc.v1.ClusterOperation.Builder subBuilder = null; + if (createCluster_ != null) { + subBuilder = createCluster_.toBuilder(); + } + createCluster_ = input.readMessage(com.google.cloud.dataproc.v1.ClusterOperation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(createCluster_); + createCluster_ = subBuilder.buildPartial(); + } + + break; + } + case 34: { + com.google.cloud.dataproc.v1.WorkflowGraph.Builder subBuilder = null; + if (graph_ 
!= null) { + subBuilder = graph_.toBuilder(); + } + graph_ = input.readMessage(com.google.cloud.dataproc.v1.WorkflowGraph.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(graph_); + graph_ = subBuilder.buildPartial(); + } + + break; + } + case 42: { + com.google.cloud.dataproc.v1.ClusterOperation.Builder subBuilder = null; + if (deleteCluster_ != null) { + subBuilder = deleteCluster_.toBuilder(); + } + deleteCluster_ = input.readMessage(com.google.cloud.dataproc.v1.ClusterOperation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(deleteCluster_); + deleteCluster_ = subBuilder.buildPartial(); + } + + break; + } + case 48: { + int rawValue = input.readEnum(); + + state_ = rawValue; + break; + } + case 58: { + java.lang.String s = input.readStringRequireUtf8(); + + clusterName_ = s; + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + parameters_ = com.google.protobuf.MapField.newMapField( + ParametersDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000080; + } + com.google.protobuf.MapEntry + parameters__ = input.readMessage( + ParametersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + parameters_.getMutableMap().put( + parameters__.getKey(), parameters__.getValue()); + break; + } + case 74: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (startTime_ != null) { + subBuilder = startTime_.toBuilder(); + } + startTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(startTime_); + startTime_ = subBuilder.buildPartial(); + } + + break; + } + case 82: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (endTime_ != null) { + subBuilder = endTime_.toBuilder(); + } + endTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(endTime_); + endTime_ 
= subBuilder.buildPartial(); + } + + break; + } + case 90: { + java.lang.String s = input.readStringRequireUtf8(); + + clusterUuid_ = s; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowMetadata_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 8: + return internalGetParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowMetadata_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowMetadata.class, com.google.cloud.dataproc.v1.WorkflowMetadata.Builder.class); + } + + /** + *
+   * The operation state.
+   * 
+ * + * Protobuf enum {@code google.cloud.dataproc.v1.WorkflowMetadata.State} + */ + public enum State + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * Unused.
+     * 
+ * + * UNKNOWN = 0; + */ + UNKNOWN(0), + /** + *
+     * The operation has been created.
+     * 
+ * + * PENDING = 1; + */ + PENDING(1), + /** + *
+     * The operation is running.
+     * 
+ * + * RUNNING = 2; + */ + RUNNING(2), + /** + *
+     * The operation is done; either cancelled or completed.
+     * 
+ * + * DONE = 3; + */ + DONE(3), + UNRECOGNIZED(-1), + ; + + /** + *
+     * Unused.
+     * 
+ * + * UNKNOWN = 0; + */ + public static final int UNKNOWN_VALUE = 0; + /** + *
+     * The operation has been created.
+     * 
+ * + * PENDING = 1; + */ + public static final int PENDING_VALUE = 1; + /** + *
+     * The operation is running.
+     * 
+ * + * RUNNING = 2; + */ + public static final int RUNNING_VALUE = 2; + /** + *
+     * The operation is done; either cancelled or completed.
+     * 
+ * + * DONE = 3; + */ + public static final int DONE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { + switch (value) { + case 0: return UNKNOWN; + case 1: return PENDING; + case 2: return RUNNING; + case 3: return DONE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowMetadata.getDescriptor().getEnumTypes().get(0); + } + + private static final State[] VALUES = values(); + + public static State valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private State(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.dataproc.v1.WorkflowMetadata.State) + } + + 
private int bitField0_; + public static final int TEMPLATE_FIELD_NUMBER = 1; + private volatile java.lang.Object template_; + /** + *
+   * Output only. The "resource name" of the template.
+   * 
+ * + * string template = 1; + */ + public java.lang.String getTemplate() { + java.lang.Object ref = template_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + template_ = s; + return s; + } + } + /** + *
+   * Output only. The "resource name" of the template.
+   * 
+ * + * string template = 1; + */ + public com.google.protobuf.ByteString + getTemplateBytes() { + java.lang.Object ref = template_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + template_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VERSION_FIELD_NUMBER = 2; + private int version_; + /** + *
+   * Output only. The version of template at the time of
+   * workflow instantiation.
+   * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + + public static final int CREATE_CLUSTER_FIELD_NUMBER = 3; + private com.google.cloud.dataproc.v1.ClusterOperation createCluster_; + /** + *
+   * Output only. The create cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public boolean hasCreateCluster() { + return createCluster_ != null; + } + /** + *
+   * Output only. The create cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public com.google.cloud.dataproc.v1.ClusterOperation getCreateCluster() { + return createCluster_ == null ? com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance() : createCluster_; + } + /** + *
+   * Output only. The create cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public com.google.cloud.dataproc.v1.ClusterOperationOrBuilder getCreateClusterOrBuilder() { + return getCreateCluster(); + } + + public static final int GRAPH_FIELD_NUMBER = 4; + private com.google.cloud.dataproc.v1.WorkflowGraph graph_; + /** + *
+   * Output only. The workflow graph.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public boolean hasGraph() { + return graph_ != null; + } + /** + *
+   * Output only. The workflow graph.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public com.google.cloud.dataproc.v1.WorkflowGraph getGraph() { + return graph_ == null ? com.google.cloud.dataproc.v1.WorkflowGraph.getDefaultInstance() : graph_; + } + /** + *
+   * Output only. The workflow graph.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder getGraphOrBuilder() { + return getGraph(); + } + + public static final int DELETE_CLUSTER_FIELD_NUMBER = 5; + private com.google.cloud.dataproc.v1.ClusterOperation deleteCluster_; + /** + *
+   * Output only. The delete cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public boolean hasDeleteCluster() { + return deleteCluster_ != null; + } + /** + *
+   * Output only. The delete cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public com.google.cloud.dataproc.v1.ClusterOperation getDeleteCluster() { + return deleteCluster_ == null ? com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance() : deleteCluster_; + } + /** + *
+   * Output only. The delete cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public com.google.cloud.dataproc.v1.ClusterOperationOrBuilder getDeleteClusterOrBuilder() { + return getDeleteCluster(); + } + + public static final int STATE_FIELD_NUMBER = 6; + private int state_; + /** + *
+   * Output only. The workflow state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public int getStateValue() { + return state_; + } + /** + *
+   * Output only. The workflow state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public com.google.cloud.dataproc.v1.WorkflowMetadata.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.dataproc.v1.WorkflowMetadata.State result = com.google.cloud.dataproc.v1.WorkflowMetadata.State.valueOf(state_); + return result == null ? com.google.cloud.dataproc.v1.WorkflowMetadata.State.UNRECOGNIZED : result; + } + + public static final int CLUSTER_NAME_FIELD_NUMBER = 7; + private volatile java.lang.Object clusterName_; + /** + *
+   * Output only. The name of the target cluster.
+   * 
+ * + * string cluster_name = 7; + */ + public java.lang.String getClusterName() { + java.lang.Object ref = clusterName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterName_ = s; + return s; + } + } + /** + *
+   * Output only. The name of the target cluster.
+   * 
+ * + * string cluster_name = 7; + */ + public com.google.protobuf.ByteString + getClusterNameBytes() { + java.lang.Object ref = clusterName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PARAMETERS_FIELD_NUMBER = 8; + private static final class ParametersDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowMetadata_ParametersEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> parameters_; + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + + public boolean containsParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetParameters().getMap().containsKey(key); + } + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + + public java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + + public java.lang.String getParametersOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int START_TIME_FIELD_NUMBER = 9; + private com.google.protobuf.Timestamp startTime_; + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public boolean hasStartTime() { + return startTime_ != null; + } + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.Timestamp getStartTime() { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + return getStartTime(); + } + + public static final int END_TIME_FIELD_NUMBER = 10; + private com.google.protobuf.Timestamp endTime_; + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public boolean hasEndTime() { + return endTime_ != null; + } + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.Timestamp getEndTime() { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + return getEndTime(); + } + + public static final int CLUSTER_UUID_FIELD_NUMBER = 11; + private volatile java.lang.Object clusterUuid_; + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + public java.lang.String getClusterUuid() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterUuid_ = s; + return s; + } + } + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + public com.google.protobuf.ByteString + getClusterUuidBytes() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getTemplateBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, template_); + } + if (version_ != 0) { + output.writeInt32(2, version_); + } + if (createCluster_ != null) { + output.writeMessage(3, getCreateCluster()); + } + if (graph_ != null) { + output.writeMessage(4, getGraph()); + } + if (deleteCluster_ != null) { + output.writeMessage(5, getDeleteCluster()); + } + if (state_ != com.google.cloud.dataproc.v1.WorkflowMetadata.State.UNKNOWN.getNumber()) { + output.writeEnum(6, state_); + } + if (!getClusterNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, clusterName_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetParameters(), + ParametersDefaultEntryHolder.defaultEntry, + 8); + if (startTime_ != null) { + output.writeMessage(9, getStartTime()); + } + if (endTime_ != null) { + output.writeMessage(10, getEndTime()); + } + if (!getClusterUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 11, clusterUuid_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = 
memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getTemplateBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, template_); + } + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, version_); + } + if (createCluster_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getCreateCluster()); + } + if (graph_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getGraph()); + } + if (deleteCluster_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getDeleteCluster()); + } + if (state_ != com.google.cloud.dataproc.v1.WorkflowMetadata.State.UNKNOWN.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(6, state_); + } + if (!getClusterNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, clusterName_); + } + for (java.util.Map.Entry entry + : internalGetParameters().getMap().entrySet()) { + com.google.protobuf.MapEntry + parameters__ = ParametersDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, parameters__); + } + if (startTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(9, getStartTime()); + } + if (endTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, getEndTime()); + } + if (!getClusterUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, clusterUuid_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.WorkflowMetadata)) { + return 
super.equals(obj); + } + com.google.cloud.dataproc.v1.WorkflowMetadata other = (com.google.cloud.dataproc.v1.WorkflowMetadata) obj; + + boolean result = true; + result = result && getTemplate() + .equals(other.getTemplate()); + result = result && (getVersion() + == other.getVersion()); + result = result && (hasCreateCluster() == other.hasCreateCluster()); + if (hasCreateCluster()) { + result = result && getCreateCluster() + .equals(other.getCreateCluster()); + } + result = result && (hasGraph() == other.hasGraph()); + if (hasGraph()) { + result = result && getGraph() + .equals(other.getGraph()); + } + result = result && (hasDeleteCluster() == other.hasDeleteCluster()); + if (hasDeleteCluster()) { + result = result && getDeleteCluster() + .equals(other.getDeleteCluster()); + } + result = result && state_ == other.state_; + result = result && getClusterName() + .equals(other.getClusterName()); + result = result && internalGetParameters().equals( + other.internalGetParameters()); + result = result && (hasStartTime() == other.hasStartTime()); + if (hasStartTime()) { + result = result && getStartTime() + .equals(other.getStartTime()); + } + result = result && (hasEndTime() == other.hasEndTime()); + if (hasEndTime()) { + result = result && getEndTime() + .equals(other.getEndTime()); + } + result = result && getClusterUuid() + .equals(other.getClusterUuid()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TEMPLATE_FIELD_NUMBER; + hash = (53 * hash) + getTemplate().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + if (hasCreateCluster()) { + hash = (37 * hash) + CREATE_CLUSTER_FIELD_NUMBER; + hash = (53 * hash) + getCreateCluster().hashCode(); + } + if (hasGraph()) { + hash = (37 * hash) + 
GRAPH_FIELD_NUMBER; + hash = (53 * hash) + getGraph().hashCode(); + } + if (hasDeleteCluster()) { + hash = (37 * hash) + DELETE_CLUSTER_FIELD_NUMBER; + hash = (53 * hash) + getDeleteCluster().hashCode(); + } + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + state_; + hash = (37 * hash) + CLUSTER_NAME_FIELD_NUMBER; + hash = (53 * hash) + getClusterName().hashCode(); + if (!internalGetParameters().getMap().isEmpty()) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + internalGetParameters().hashCode(); + } + if (hasStartTime()) { + hash = (37 * hash) + START_TIME_FIELD_NUMBER; + hash = (53 * hash) + getStartTime().hashCode(); + } + if (hasEndTime()) { + hash = (37 * hash) + END_TIME_FIELD_NUMBER; + hash = (53 * hash) + getEndTime().hashCode(); + } + hash = (37 * hash) + CLUSTER_UUID_FIELD_NUMBER; + hash = (53 * hash) + getClusterUuid().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowMetadata parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.WorkflowMetadata prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A Cloud Dataproc workflow template resource.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowMetadata} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.WorkflowMetadata) + com.google.cloud.dataproc.v1.WorkflowMetadataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowMetadata_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 8: + return internalGetParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 8: + return internalGetMutableParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowMetadata_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowMetadata.class, com.google.cloud.dataproc.v1.WorkflowMetadata.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.WorkflowMetadata.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + 
super.clear(); + template_ = ""; + + version_ = 0; + + if (createClusterBuilder_ == null) { + createCluster_ = null; + } else { + createCluster_ = null; + createClusterBuilder_ = null; + } + if (graphBuilder_ == null) { + graph_ = null; + } else { + graph_ = null; + graphBuilder_ = null; + } + if (deleteClusterBuilder_ == null) { + deleteCluster_ = null; + } else { + deleteCluster_ = null; + deleteClusterBuilder_ = null; + } + state_ = 0; + + clusterName_ = ""; + + internalGetMutableParameters().clear(); + if (startTimeBuilder_ == null) { + startTime_ = null; + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + if (endTimeBuilder_ == null) { + endTime_ = null; + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + clusterUuid_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowMetadata_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowMetadata getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.WorkflowMetadata.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowMetadata build() { + com.google.cloud.dataproc.v1.WorkflowMetadata result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowMetadata buildPartial() { + com.google.cloud.dataproc.v1.WorkflowMetadata result = new com.google.cloud.dataproc.v1.WorkflowMetadata(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.template_ = template_; + result.version_ = version_; + if (createClusterBuilder_ == null) { + result.createCluster_ = createCluster_; + } else { + result.createCluster_ = createClusterBuilder_.build(); + } + if (graphBuilder_ == null) { 
+ result.graph_ = graph_; + } else { + result.graph_ = graphBuilder_.build(); + } + if (deleteClusterBuilder_ == null) { + result.deleteCluster_ = deleteCluster_; + } else { + result.deleteCluster_ = deleteClusterBuilder_.build(); + } + result.state_ = state_; + result.clusterName_ = clusterName_; + result.parameters_ = internalGetParameters(); + result.parameters_.makeImmutable(); + if (startTimeBuilder_ == null) { + result.startTime_ = startTime_; + } else { + result.startTime_ = startTimeBuilder_.build(); + } + if (endTimeBuilder_ == null) { + result.endTime_ = endTime_; + } else { + result.endTime_ = endTimeBuilder_.build(); + } + result.clusterUuid_ = clusterUuid_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.WorkflowMetadata) { + return mergeFrom((com.google.cloud.dataproc.v1.WorkflowMetadata)other); + 
} else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.WorkflowMetadata other) { + if (other == com.google.cloud.dataproc.v1.WorkflowMetadata.getDefaultInstance()) return this; + if (!other.getTemplate().isEmpty()) { + template_ = other.template_; + onChanged(); + } + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + if (other.hasCreateCluster()) { + mergeCreateCluster(other.getCreateCluster()); + } + if (other.hasGraph()) { + mergeGraph(other.getGraph()); + } + if (other.hasDeleteCluster()) { + mergeDeleteCluster(other.getDeleteCluster()); + } + if (other.state_ != 0) { + setStateValue(other.getStateValue()); + } + if (!other.getClusterName().isEmpty()) { + clusterName_ = other.clusterName_; + onChanged(); + } + internalGetMutableParameters().mergeFrom( + other.internalGetParameters()); + if (other.hasStartTime()) { + mergeStartTime(other.getStartTime()); + } + if (other.hasEndTime()) { + mergeEndTime(other.getEndTime()); + } + if (!other.getClusterUuid().isEmpty()) { + clusterUuid_ = other.clusterUuid_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.WorkflowMetadata parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.WorkflowMetadata) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object template_ = ""; + /** + *
+     * Output only. The "resource name" of the template.
+     * 
+ * + * string template = 1; + */ + public java.lang.String getTemplate() { + java.lang.Object ref = template_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + template_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The "resource name" of the template.
+     * 
+ * + * string template = 1; + */ + public com.google.protobuf.ByteString + getTemplateBytes() { + java.lang.Object ref = template_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + template_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The "resource name" of the template.
+     * 
+ * + * string template = 1; + */ + public Builder setTemplate( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + template_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The "resource name" of the template.
+     * 
+ * + * string template = 1; + */ + public Builder clearTemplate() { + + template_ = getDefaultInstance().getTemplate(); + onChanged(); + return this; + } + /** + *
+     * Output only. The "resource name" of the template.
+     * 
+ * + * string template = 1; + */ + public Builder setTemplateBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + template_ = value; + onChanged(); + return this; + } + + private int version_ ; + /** + *
+     * Output only. The version of template at the time of
+     * workflow instantiation.
+     * 
+ * + * int32 version = 2; + */ + public int getVersion() { + return version_; + } + /** + *
+     * Output only. The version of template at the time of
+     * workflow instantiation.
+     * 
+ * + * int32 version = 2; + */ + public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The version of template at the time of
+     * workflow instantiation.
+     * 
+ * + * int32 version = 2; + */ + public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + + private com.google.cloud.dataproc.v1.ClusterOperation createCluster_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterOperation, com.google.cloud.dataproc.v1.ClusterOperation.Builder, com.google.cloud.dataproc.v1.ClusterOperationOrBuilder> createClusterBuilder_; + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public boolean hasCreateCluster() { + return createClusterBuilder_ != null || createCluster_ != null; + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public com.google.cloud.dataproc.v1.ClusterOperation getCreateCluster() { + if (createClusterBuilder_ == null) { + return createCluster_ == null ? com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance() : createCluster_; + } else { + return createClusterBuilder_.getMessage(); + } + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public Builder setCreateCluster(com.google.cloud.dataproc.v1.ClusterOperation value) { + if (createClusterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createCluster_ = value; + onChanged(); + } else { + createClusterBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public Builder setCreateCluster( + com.google.cloud.dataproc.v1.ClusterOperation.Builder builderForValue) { + if (createClusterBuilder_ == null) { + createCluster_ = builderForValue.build(); + onChanged(); + } else { + createClusterBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public Builder mergeCreateCluster(com.google.cloud.dataproc.v1.ClusterOperation value) { + if (createClusterBuilder_ == null) { + if (createCluster_ != null) { + createCluster_ = + com.google.cloud.dataproc.v1.ClusterOperation.newBuilder(createCluster_).mergeFrom(value).buildPartial(); + } else { + createCluster_ = value; + } + onChanged(); + } else { + createClusterBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public Builder clearCreateCluster() { + if (createClusterBuilder_ == null) { + createCluster_ = null; + onChanged(); + } else { + createCluster_ = null; + createClusterBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public com.google.cloud.dataproc.v1.ClusterOperation.Builder getCreateClusterBuilder() { + + onChanged(); + return getCreateClusterFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + public com.google.cloud.dataproc.v1.ClusterOperationOrBuilder getCreateClusterOrBuilder() { + if (createClusterBuilder_ != null) { + return createClusterBuilder_.getMessageOrBuilder(); + } else { + return createCluster_ == null ? + com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance() : createCluster_; + } + } + /** + *
+     * Output only. The create cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterOperation, com.google.cloud.dataproc.v1.ClusterOperation.Builder, com.google.cloud.dataproc.v1.ClusterOperationOrBuilder> + getCreateClusterFieldBuilder() { + if (createClusterBuilder_ == null) { + createClusterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterOperation, com.google.cloud.dataproc.v1.ClusterOperation.Builder, com.google.cloud.dataproc.v1.ClusterOperationOrBuilder>( + getCreateCluster(), + getParentForChildren(), + isClean()); + createCluster_ = null; + } + return createClusterBuilder_; + } + + private com.google.cloud.dataproc.v1.WorkflowGraph graph_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowGraph, com.google.cloud.dataproc.v1.WorkflowGraph.Builder, com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder> graphBuilder_; + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public boolean hasGraph() { + return graphBuilder_ != null || graph_ != null; + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public com.google.cloud.dataproc.v1.WorkflowGraph getGraph() { + if (graphBuilder_ == null) { + return graph_ == null ? com.google.cloud.dataproc.v1.WorkflowGraph.getDefaultInstance() : graph_; + } else { + return graphBuilder_.getMessage(); + } + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public Builder setGraph(com.google.cloud.dataproc.v1.WorkflowGraph value) { + if (graphBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + graph_ = value; + onChanged(); + } else { + graphBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public Builder setGraph( + com.google.cloud.dataproc.v1.WorkflowGraph.Builder builderForValue) { + if (graphBuilder_ == null) { + graph_ = builderForValue.build(); + onChanged(); + } else { + graphBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public Builder mergeGraph(com.google.cloud.dataproc.v1.WorkflowGraph value) { + if (graphBuilder_ == null) { + if (graph_ != null) { + graph_ = + com.google.cloud.dataproc.v1.WorkflowGraph.newBuilder(graph_).mergeFrom(value).buildPartial(); + } else { + graph_ = value; + } + onChanged(); + } else { + graphBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public Builder clearGraph() { + if (graphBuilder_ == null) { + graph_ = null; + onChanged(); + } else { + graph_ = null; + graphBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public com.google.cloud.dataproc.v1.WorkflowGraph.Builder getGraphBuilder() { + + onChanged(); + return getGraphFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + public com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder getGraphOrBuilder() { + if (graphBuilder_ != null) { + return graphBuilder_.getMessageOrBuilder(); + } else { + return graph_ == null ? + com.google.cloud.dataproc.v1.WorkflowGraph.getDefaultInstance() : graph_; + } + } + /** + *
+     * Output only. The workflow graph.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowGraph, com.google.cloud.dataproc.v1.WorkflowGraph.Builder, com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder> + getGraphFieldBuilder() { + if (graphBuilder_ == null) { + graphBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowGraph, com.google.cloud.dataproc.v1.WorkflowGraph.Builder, com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder>( + getGraph(), + getParentForChildren(), + isClean()); + graph_ = null; + } + return graphBuilder_; + } + + private com.google.cloud.dataproc.v1.ClusterOperation deleteCluster_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterOperation, com.google.cloud.dataproc.v1.ClusterOperation.Builder, com.google.cloud.dataproc.v1.ClusterOperationOrBuilder> deleteClusterBuilder_; + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public boolean hasDeleteCluster() { + return deleteClusterBuilder_ != null || deleteCluster_ != null; + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public com.google.cloud.dataproc.v1.ClusterOperation getDeleteCluster() { + if (deleteClusterBuilder_ == null) { + return deleteCluster_ == null ? com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance() : deleteCluster_; + } else { + return deleteClusterBuilder_.getMessage(); + } + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public Builder setDeleteCluster(com.google.cloud.dataproc.v1.ClusterOperation value) { + if (deleteClusterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + deleteCluster_ = value; + onChanged(); + } else { + deleteClusterBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public Builder setDeleteCluster( + com.google.cloud.dataproc.v1.ClusterOperation.Builder builderForValue) { + if (deleteClusterBuilder_ == null) { + deleteCluster_ = builderForValue.build(); + onChanged(); + } else { + deleteClusterBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public Builder mergeDeleteCluster(com.google.cloud.dataproc.v1.ClusterOperation value) { + if (deleteClusterBuilder_ == null) { + if (deleteCluster_ != null) { + deleteCluster_ = + com.google.cloud.dataproc.v1.ClusterOperation.newBuilder(deleteCluster_).mergeFrom(value).buildPartial(); + } else { + deleteCluster_ = value; + } + onChanged(); + } else { + deleteClusterBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public Builder clearDeleteCluster() { + if (deleteClusterBuilder_ == null) { + deleteCluster_ = null; + onChanged(); + } else { + deleteCluster_ = null; + deleteClusterBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public com.google.cloud.dataproc.v1.ClusterOperation.Builder getDeleteClusterBuilder() { + + onChanged(); + return getDeleteClusterFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + public com.google.cloud.dataproc.v1.ClusterOperationOrBuilder getDeleteClusterOrBuilder() { + if (deleteClusterBuilder_ != null) { + return deleteClusterBuilder_.getMessageOrBuilder(); + } else { + return deleteCluster_ == null ? + com.google.cloud.dataproc.v1.ClusterOperation.getDefaultInstance() : deleteCluster_; + } + } + /** + *
+     * Output only. The delete cluster operation metadata.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterOperation, com.google.cloud.dataproc.v1.ClusterOperation.Builder, com.google.cloud.dataproc.v1.ClusterOperationOrBuilder> + getDeleteClusterFieldBuilder() { + if (deleteClusterBuilder_ == null) { + deleteClusterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterOperation, com.google.cloud.dataproc.v1.ClusterOperation.Builder, com.google.cloud.dataproc.v1.ClusterOperationOrBuilder>( + getDeleteCluster(), + getParentForChildren(), + isClean()); + deleteCluster_ = null; + } + return deleteClusterBuilder_; + } + + private int state_ = 0; + /** + *
+     * Output only. The workflow state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public int getStateValue() { + return state_; + } + /** + *
+     * Output only. The workflow state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public Builder setStateValue(int value) { + state_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The workflow state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public com.google.cloud.dataproc.v1.WorkflowMetadata.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.dataproc.v1.WorkflowMetadata.State result = com.google.cloud.dataproc.v1.WorkflowMetadata.State.valueOf(state_); + return result == null ? com.google.cloud.dataproc.v1.WorkflowMetadata.State.UNRECOGNIZED : result; + } + /** + *
+     * Output only. The workflow state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public Builder setState(com.google.cloud.dataproc.v1.WorkflowMetadata.State value) { + if (value == null) { + throw new NullPointerException(); + } + + state_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+     * Output only. The workflow state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + public Builder clearState() { + + state_ = 0; + onChanged(); + return this; + } + + private java.lang.Object clusterName_ = ""; + /** + *
+     * Output only. The name of the target cluster.
+     * 
+ * + * string cluster_name = 7; + */ + public java.lang.String getClusterName() { + java.lang.Object ref = clusterName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The name of the target cluster.
+     * 
+ * + * string cluster_name = 7; + */ + public com.google.protobuf.ByteString + getClusterNameBytes() { + java.lang.Object ref = clusterName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The name of the target cluster.
+     * 
+ * + * string cluster_name = 7; + */ + public Builder setClusterName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + clusterName_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The name of the target cluster.
+     * 
+ * + * string cluster_name = 7; + */ + public Builder clearClusterName() { + + clusterName_ = getDefaultInstance().getClusterName(); + onChanged(); + return this; + } + /** + *
+     * Output only. The name of the target cluster.
+     * 
+ * + * string cluster_name = 7; + */ + public Builder setClusterNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + clusterName_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> parameters_; + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + private com.google.protobuf.MapField + internalGetMutableParameters() { + onChanged();; + if (parameters_ == null) { + parameters_ = com.google.protobuf.MapField.newMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + if (!parameters_.isMutable()) { + parameters_ = parameters_.copy(); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + + public boolean containsParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetParameters().getMap().containsKey(key); + } + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + + public java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + + public java.lang.String getParametersOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearParameters() { + internalGetMutableParameters().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + + public Builder removeParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableParameters().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableParameters() { + return internalGetMutableParameters().getMutableMap(); + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + public Builder putParameters( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableParameters().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Map from parameter names to values that were used for those parameters.
+     * 
+ * + * map<string, string> parameters = 8; + */ + + public Builder putAllParameters( + java.util.Map values) { + internalGetMutableParameters().getMutableMap() + .putAll(values); + return this; + } + + private com.google.protobuf.Timestamp startTime_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public boolean hasStartTime() { + return startTimeBuilder_ != null || startTime_ != null; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.Timestamp getStartTime() { + if (startTimeBuilder_ == null) { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } else { + return startTimeBuilder_.getMessage(); + } + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder setStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + startTime_ = value; + onChanged(); + } else { + startTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder setStartTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (startTimeBuilder_ == null) { + startTime_ = builderForValue.build(); + onChanged(); + } else { + startTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder mergeStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (startTime_ != null) { + startTime_ = + com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial(); + } else { + startTime_ = value; + } + onChanged(); + } else { + startTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder clearStartTime() { + if (startTimeBuilder_ == null) { + startTime_ = null; + onChanged(); + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { + + onChanged(); + return getStartTimeFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + if (startTimeBuilder_ != null) { + return startTimeBuilder_.getMessageOrBuilder(); + } else { + return startTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getStartTimeFieldBuilder() { + if (startTimeBuilder_ == null) { + startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getStartTime(), + getParentForChildren(), + isClean()); + startTime_ = null; + } + return startTimeBuilder_; + } + + private com.google.protobuf.Timestamp endTime_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> endTimeBuilder_; + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public boolean hasEndTime() { + return endTimeBuilder_ != null || endTime_ != null; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.Timestamp getEndTime() { + if (endTimeBuilder_ == null) { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } else { + return endTimeBuilder_.getMessage(); + } + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder setEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + endTime_ = value; + onChanged(); + } else { + endTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder setEndTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (endTimeBuilder_ == null) { + endTime_ = builderForValue.build(); + onChanged(); + } else { + endTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder mergeEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (endTime_ != null) { + endTime_ = + com.google.protobuf.Timestamp.newBuilder(endTime_).mergeFrom(value).buildPartial(); + } else { + endTime_ = value; + } + onChanged(); + } else { + endTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder clearEndTime() { + if (endTimeBuilder_ == null) { + endTime_ = null; + onChanged(); + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { + + onChanged(); + return getEndTimeFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + if (endTimeBuilder_ != null) { + return endTimeBuilder_.getMessageOrBuilder(); + } else { + return endTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getEndTimeFieldBuilder() { + if (endTimeBuilder_ == null) { + endTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getEndTime(), + getParentForChildren(), + isClean()); + endTime_ = null; + } + return endTimeBuilder_; + } + + private java.lang.Object clusterUuid_ = ""; + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public java.lang.String getClusterUuid() { + java.lang.Object ref = clusterUuid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterUuid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public com.google.protobuf.ByteString + getClusterUuidBytes() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public Builder setClusterUuid( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + clusterUuid_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public Builder clearClusterUuid() { + + clusterUuid_ = getDefaultInstance().getClusterUuid(); + onChanged(); + return this; + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public Builder setClusterUuidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + clusterUuid_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.WorkflowMetadata) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowMetadata) + private static final com.google.cloud.dataproc.v1.WorkflowMetadata DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.WorkflowMetadata(); + } + + public static com.google.cloud.dataproc.v1.WorkflowMetadata getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public WorkflowMetadata parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WorkflowMetadata(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowMetadata getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadataOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadataOrBuilder.java new file mode 100644 index 000000000000..af58f75f520e --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowMetadataOrBuilder.java @@ -0,0 +1,269 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface WorkflowMetadataOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.WorkflowMetadata) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Output only. The "resource name" of the template.
+   * 
+ * + * string template = 1; + */ + java.lang.String getTemplate(); + /** + *
+   * Output only. The "resource name" of the template.
+   * 
+ * + * string template = 1; + */ + com.google.protobuf.ByteString + getTemplateBytes(); + + /** + *
+   * Output only. The version of template at the time of
+   * workflow instantiation.
+   * 
+ * + * int32 version = 2; + */ + int getVersion(); + + /** + *
+   * Output only. The create cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + boolean hasCreateCluster(); + /** + *
+   * Output only. The create cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + com.google.cloud.dataproc.v1.ClusterOperation getCreateCluster(); + /** + *
+   * Output only. The create cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation create_cluster = 3; + */ + com.google.cloud.dataproc.v1.ClusterOperationOrBuilder getCreateClusterOrBuilder(); + + /** + *
+   * Output only. The workflow graph.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + boolean hasGraph(); + /** + *
+   * Output only. The workflow graph.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + com.google.cloud.dataproc.v1.WorkflowGraph getGraph(); + /** + *
+   * Output only. The workflow graph.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowGraph graph = 4; + */ + com.google.cloud.dataproc.v1.WorkflowGraphOrBuilder getGraphOrBuilder(); + + /** + *
+   * Output only. The delete cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + boolean hasDeleteCluster(); + /** + *
+   * Output only. The delete cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + com.google.cloud.dataproc.v1.ClusterOperation getDeleteCluster(); + /** + *
+   * Output only. The delete cluster operation metadata.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterOperation delete_cluster = 5; + */ + com.google.cloud.dataproc.v1.ClusterOperationOrBuilder getDeleteClusterOrBuilder(); + + /** + *
+   * Output only. The workflow state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + int getStateValue(); + /** + *
+   * Output only. The workflow state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowMetadata.State state = 6; + */ + com.google.cloud.dataproc.v1.WorkflowMetadata.State getState(); + + /** + *
+   * Output only. The name of the target cluster.
+   * 
+ * + * string cluster_name = 7; + */ + java.lang.String getClusterName(); + /** + *
+   * Output only. The name of the target cluster.
+   * 
+ * + * string cluster_name = 7; + */ + com.google.protobuf.ByteString + getClusterNameBytes(); + + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + int getParametersCount(); + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + boolean containsParameters( + java.lang.String key); + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getParameters(); + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + java.util.Map + getParametersMap(); + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + + java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Map from parameter names to values that were used for those parameters.
+   * 
+ * + * map<string, string> parameters = 8; + */ + + java.lang.String getParametersOrThrow( + java.lang.String key); + + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + boolean hasStartTime(); + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + com.google.protobuf.Timestamp getStartTime(); + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder(); + + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + boolean hasEndTime(); + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + com.google.protobuf.Timestamp getEndTime(); + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder(); + + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + java.lang.String getClusterUuid(); + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + com.google.protobuf.ByteString + getClusterUuidBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNode.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNode.java new file mode 100644 index 000000000000..08419fe360eb --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNode.java @@ -0,0 +1,1415 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * The workflow node.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowNode} + */ +public final class WorkflowNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.WorkflowNode) + WorkflowNodeOrBuilder { +private static final long serialVersionUID = 0L; + // Use WorkflowNode.newBuilder() to construct. + private WorkflowNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private WorkflowNode() { + stepId_ = ""; + prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + jobId_ = ""; + state_ = 0; + error_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WorkflowNode( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + stepId_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + prerequisiteStepIds_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000002; + } + prerequisiteStepIds_.add(s); + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + jobId_ = s; + break; + } + case 40: { + int rawValue = input.readEnum(); + + state_ = rawValue; + break; + } + case 50: { + java.lang.String s = input.readStringRequireUtf8(); + + error_ = s; + break; + } + 
default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + prerequisiteStepIds_ = prerequisiteStepIds_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowNode.class, com.google.cloud.dataproc.v1.WorkflowNode.Builder.class); + } + + /** + *
+   * The workflow node state.
+   * 
+ * + * Protobuf enum {@code google.cloud.dataproc.v1.WorkflowNode.NodeState} + */ + public enum NodeState + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * State is unspecified.
+     * 
+ * + * NODE_STATE_UNSPECIFIED = 0; + */ + NODE_STATE_UNSPECIFIED(0), + /** + *
+     * The node is awaiting prerequisite node to finish.
+     * 
+ * + * BLOCKED = 1; + */ + BLOCKED(1), + /** + *
+     * The node is runnable but not running.
+     * 
+ * + * RUNNABLE = 2; + */ + RUNNABLE(2), + /** + *
+     * The node is running.
+     * 
+ * + * RUNNING = 3; + */ + RUNNING(3), + /** + *
+     * The node completed successfully.
+     * 
+ * + * COMPLETED = 4; + */ + COMPLETED(4), + /** + *
+     * The node failed. A node can be marked FAILED because
+     * its ancestor or peer failed.
+     * 
+ * + * FAILED = 5; + */ + FAILED(5), + UNRECOGNIZED(-1), + ; + + /** + *
+     * State is unspecified.
+     * 
+ * + * NODE_STATE_UNSPECIFIED = 0; + */ + public static final int NODE_STATE_UNSPECIFIED_VALUE = 0; + /** + *
+     * The node is awaiting prerequisite node to finish.
+     * 
+ * + * BLOCKED = 1; + */ + public static final int BLOCKED_VALUE = 1; + /** + *
+     * The node is runnable but not running.
+     * 
+ * + * RUNNABLE = 2; + */ + public static final int RUNNABLE_VALUE = 2; + /** + *
+     * The node is running.
+     * 
+ * + * RUNNING = 3; + */ + public static final int RUNNING_VALUE = 3; + /** + *
+     * The node completed successfully.
+     * 
+ * + * COMPLETED = 4; + */ + public static final int COMPLETED_VALUE = 4; + /** + *
+     * The node failed. A node can be marked FAILED because
+     * its ancestor or peer failed.
+     * 
+ * + * FAILED = 5; + */ + public static final int FAILED_VALUE = 5; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static NodeState valueOf(int value) { + return forNumber(value); + } + + public static NodeState forNumber(int value) { + switch (value) { + case 0: return NODE_STATE_UNSPECIFIED; + case 1: return BLOCKED; + case 2: return RUNNABLE; + case 3: return RUNNING; + case 4: return COMPLETED; + case 5: return FAILED; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + NodeState> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public NodeState findValueByNumber(int number) { + return NodeState.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowNode.getDescriptor().getEnumTypes().get(0); + } + + private static final NodeState[] VALUES = values(); + + public static NodeState valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private NodeState(int value) { + this.value = value; 
+ } + + // @@protoc_insertion_point(enum_scope:google.cloud.dataproc.v1.WorkflowNode.NodeState) + } + + private int bitField0_; + public static final int STEP_ID_FIELD_NUMBER = 1; + private volatile java.lang.Object stepId_; + /** + *
+   * Output only. The name of the node.
+   * 
+ * + * string step_id = 1; + */ + public java.lang.String getStepId() { + java.lang.Object ref = stepId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + stepId_ = s; + return s; + } + } + /** + *
+   * Output only. The name of the node.
+   * 
+ * + * string step_id = 1; + */ + public com.google.protobuf.ByteString + getStepIdBytes() { + java.lang.Object ref = stepId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stepId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PREREQUISITE_STEP_IDS_FIELD_NUMBER = 2; + private com.google.protobuf.LazyStringList prerequisiteStepIds_; + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public com.google.protobuf.ProtocolStringList + getPrerequisiteStepIdsList() { + return prerequisiteStepIds_; + } + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public int getPrerequisiteStepIdsCount() { + return prerequisiteStepIds_.size(); + } + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public java.lang.String getPrerequisiteStepIds(int index) { + return prerequisiteStepIds_.get(index); + } + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public com.google.protobuf.ByteString + getPrerequisiteStepIdsBytes(int index) { + return prerequisiteStepIds_.getByteString(index); + } + + public static final int JOB_ID_FIELD_NUMBER = 3; + private volatile java.lang.Object jobId_; + /** + *
+   * Output only. The job id; populated after the node enters RUNNING state.
+   * 
+ * + * string job_id = 3; + */ + public java.lang.String getJobId() { + java.lang.Object ref = jobId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobId_ = s; + return s; + } + } + /** + *
+   * Output only. The job id; populated after the node enters RUNNING state.
+   * 
+ * + * string job_id = 3; + */ + public com.google.protobuf.ByteString + getJobIdBytes() { + java.lang.Object ref = jobId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATE_FIELD_NUMBER = 5; + private int state_; + /** + *
+   * Output only. The node state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public int getStateValue() { + return state_; + } + /** + *
+   * Output only. The node state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public com.google.cloud.dataproc.v1.WorkflowNode.NodeState getState() { + @SuppressWarnings("deprecation") + com.google.cloud.dataproc.v1.WorkflowNode.NodeState result = com.google.cloud.dataproc.v1.WorkflowNode.NodeState.valueOf(state_); + return result == null ? com.google.cloud.dataproc.v1.WorkflowNode.NodeState.UNRECOGNIZED : result; + } + + public static final int ERROR_FIELD_NUMBER = 6; + private volatile java.lang.Object error_; + /** + *
+   * Output only. The error detail.
+   * 
+ * + * string error = 6; + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } + } + /** + *
+   * Output only. The error detail.
+   * 
+ * + * string error = 6; + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getStepIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, stepId_); + } + for (int i = 0; i < prerequisiteStepIds_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, prerequisiteStepIds_.getRaw(i)); + } + if (!getJobIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, jobId_); + } + if (state_ != com.google.cloud.dataproc.v1.WorkflowNode.NodeState.NODE_STATE_UNSPECIFIED.getNumber()) { + output.writeEnum(5, state_); + } + if (!getErrorBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, error_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getStepIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, stepId_); + } + { + int dataSize = 0; + for (int i = 0; i < prerequisiteStepIds_.size(); i++) { + dataSize += computeStringSizeNoTag(prerequisiteStepIds_.getRaw(i)); + } + size += dataSize; + size += 1 * getPrerequisiteStepIdsList().size(); + } + if (!getJobIdBytes().isEmpty()) { + size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(3, jobId_); + } + if (state_ != com.google.cloud.dataproc.v1.WorkflowNode.NodeState.NODE_STATE_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, state_); + } + if (!getErrorBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, error_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.WorkflowNode)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.WorkflowNode other = (com.google.cloud.dataproc.v1.WorkflowNode) obj; + + boolean result = true; + result = result && getStepId() + .equals(other.getStepId()); + result = result && getPrerequisiteStepIdsList() + .equals(other.getPrerequisiteStepIdsList()); + result = result && getJobId() + .equals(other.getJobId()); + result = result && state_ == other.state_; + result = result && getError() + .equals(other.getError()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + getStepId().hashCode(); + if (getPrerequisiteStepIdsCount() > 0) { + hash = (37 * hash) + PREREQUISITE_STEP_IDS_FIELD_NUMBER; + hash = (53 * hash) + getPrerequisiteStepIdsList().hashCode(); + } + hash = (37 * hash) + JOB_ID_FIELD_NUMBER; + hash = (53 * hash) + getJobId().hashCode(); + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + state_; + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + 
return hash; + } + + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static 
com.google.cloud.dataproc.v1.WorkflowNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.WorkflowNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * The workflow node.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.WorkflowNode) + com.google.cloud.dataproc.v1.WorkflowNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowNode.class, com.google.cloud.dataproc.v1.WorkflowNode.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.WorkflowNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + stepId_ = ""; + + prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + jobId_ = ""; + + state_ = 0; + + error_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowNode_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowNode getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1.WorkflowNode.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowNode build() { + com.google.cloud.dataproc.v1.WorkflowNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowNode buildPartial() { + com.google.cloud.dataproc.v1.WorkflowNode result = new com.google.cloud.dataproc.v1.WorkflowNode(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.stepId_ = stepId_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + prerequisiteStepIds_ = prerequisiteStepIds_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.prerequisiteStepIds_ = prerequisiteStepIds_; + result.jobId_ = jobId_; + result.state_ = state_; + result.error_ = error_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); 
+ } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.WorkflowNode) { + return mergeFrom((com.google.cloud.dataproc.v1.WorkflowNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.WorkflowNode other) { + if (other == com.google.cloud.dataproc.v1.WorkflowNode.getDefaultInstance()) return this; + if (!other.getStepId().isEmpty()) { + stepId_ = other.stepId_; + onChanged(); + } + if (!other.prerequisiteStepIds_.isEmpty()) { + if (prerequisiteStepIds_.isEmpty()) { + prerequisiteStepIds_ = other.prerequisiteStepIds_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.addAll(other.prerequisiteStepIds_); + } + onChanged(); + } + if (!other.getJobId().isEmpty()) { + jobId_ = other.jobId_; + onChanged(); + } + if (other.state_ != 0) { + setStateValue(other.getStateValue()); + } + if (!other.getError().isEmpty()) { + error_ = other.error_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.WorkflowNode parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.WorkflowNode) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object stepId_ = ""; + /** + *
+     * Output only. The name of the node.
+     * 
+ * + * string step_id = 1; + */ + public java.lang.String getStepId() { + java.lang.Object ref = stepId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + stepId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The name of the node.
+     * 
+ * + * string step_id = 1; + */ + public com.google.protobuf.ByteString + getStepIdBytes() { + java.lang.Object ref = stepId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stepId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The name of the node.
+     * 
+ * + * string step_id = 1; + */ + public Builder setStepId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The name of the node.
+     * 
+ * + * string step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = getDefaultInstance().getStepId(); + onChanged(); + return this; + } + /** + *
+     * Output only. The name of the node.
+     * 
+ * + * string step_id = 1; + */ + public Builder setStepIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + stepId_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensurePrerequisiteStepIdsIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + prerequisiteStepIds_ = new com.google.protobuf.LazyStringArrayList(prerequisiteStepIds_); + bitField0_ |= 0x00000002; + } + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public com.google.protobuf.ProtocolStringList + getPrerequisiteStepIdsList() { + return prerequisiteStepIds_.getUnmodifiableView(); + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public int getPrerequisiteStepIdsCount() { + return prerequisiteStepIds_.size(); + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public java.lang.String getPrerequisiteStepIds(int index) { + return prerequisiteStepIds_.get(index); + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public com.google.protobuf.ByteString + getPrerequisiteStepIdsBytes(int index) { + return prerequisiteStepIds_.getByteString(index); + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public Builder setPrerequisiteStepIds( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public Builder addPrerequisiteStepIds( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.add(value); + onChanged(); + return this; + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public Builder addAllPrerequisiteStepIds( + java.lang.Iterable values) { + ensurePrerequisiteStepIdsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, prerequisiteStepIds_); + onChanged(); + return this; + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public Builder clearPrerequisiteStepIds() { + prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+     * Output only. Node's prerequisite nodes.
+     * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + public Builder addPrerequisiteStepIdsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensurePrerequisiteStepIdsIsMutable(); + prerequisiteStepIds_.add(value); + onChanged(); + return this; + } + + private java.lang.Object jobId_ = ""; + /** + *
+     * Output only. The job id; populated after the node enters RUNNING state.
+     * 
+ * + * string job_id = 3; + */ + public java.lang.String getJobId() { + java.lang.Object ref = jobId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The job id; populated after the node enters RUNNING state.
+     * 
+ * + * string job_id = 3; + */ + public com.google.protobuf.ByteString + getJobIdBytes() { + java.lang.Object ref = jobId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The job id; populated after the node enters RUNNING state.
+     * 
+ * + * string job_id = 3; + */ + public Builder setJobId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + jobId_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The job id; populated after the node enters RUNNING state.
+     * 
+ * + * string job_id = 3; + */ + public Builder clearJobId() { + + jobId_ = getDefaultInstance().getJobId(); + onChanged(); + return this; + } + /** + *
+     * Output only. The job id; populated after the node enters RUNNING state.
+     * 
+ * + * string job_id = 3; + */ + public Builder setJobIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + jobId_ = value; + onChanged(); + return this; + } + + private int state_ = 0; + /** + *
+     * Output only. The node state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public int getStateValue() { + return state_; + } + /** + *
+     * Output only. The node state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public Builder setStateValue(int value) { + state_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The node state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public com.google.cloud.dataproc.v1.WorkflowNode.NodeState getState() { + @SuppressWarnings("deprecation") + com.google.cloud.dataproc.v1.WorkflowNode.NodeState result = com.google.cloud.dataproc.v1.WorkflowNode.NodeState.valueOf(state_); + return result == null ? com.google.cloud.dataproc.v1.WorkflowNode.NodeState.UNRECOGNIZED : result; + } + /** + *
+     * Output only. The node state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public Builder setState(com.google.cloud.dataproc.v1.WorkflowNode.NodeState value) { + if (value == null) { + throw new NullPointerException(); + } + + state_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+     * Output only. The node state.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + public Builder clearState() { + + state_ = 0; + onChanged(); + return this; + } + + private java.lang.Object error_ = ""; + /** + *
+     * Output only. The error detail.
+     * 
+ * + * string error = 6; + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The error detail.
+     * 
+ * + * string error = 6; + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The error detail.
+     * 
+ * + * string error = 6; + */ + public Builder setError( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + error_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The error detail.
+     * 
+ * + * string error = 6; + */ + public Builder clearError() { + + error_ = getDefaultInstance().getError(); + onChanged(); + return this; + } + /** + *
+     * Output only. The error detail.
+     * 
+ * + * string error = 6; + */ + public Builder setErrorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + error_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.WorkflowNode) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowNode) + private static final com.google.cloud.dataproc.v1.WorkflowNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.WorkflowNode(); + } + + public static com.google.cloud.dataproc.v1.WorkflowNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public WorkflowNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WorkflowNode(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNodeOrBuilder.java 
b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNodeOrBuilder.java new file mode 100644 index 000000000000..369161a5bc10 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowNodeOrBuilder.java @@ -0,0 +1,115 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface WorkflowNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.WorkflowNode) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Output only. The name of the node.
+   * 
+ * + * string step_id = 1; + */ + java.lang.String getStepId(); + /** + *
+   * Output only. The name of the node.
+   * 
+ * + * string step_id = 1; + */ + com.google.protobuf.ByteString + getStepIdBytes(); + + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + java.util.List + getPrerequisiteStepIdsList(); + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + int getPrerequisiteStepIdsCount(); + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + java.lang.String getPrerequisiteStepIds(int index); + /** + *
+   * Output only. Node's prerequisite nodes.
+   * 
+ * + * repeated string prerequisite_step_ids = 2; + */ + com.google.protobuf.ByteString + getPrerequisiteStepIdsBytes(int index); + + /** + *
+   * Output only. The job id; populated after the node enters RUNNING state.
+   * 
+ * + * string job_id = 3; + */ + java.lang.String getJobId(); + /** + *
+   * Output only. The job id; populated after the node enters RUNNING state.
+   * 
+ * + * string job_id = 3; + */ + com.google.protobuf.ByteString + getJobIdBytes(); + + /** + *
+   * Output only. The node state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + int getStateValue(); + /** + *
+   * Output only. The node state.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowNode.NodeState state = 5; + */ + com.google.cloud.dataproc.v1.WorkflowNode.NodeState getState(); + + /** + *
+   * Output only. The error detail.
+   * 
+ * + * string error = 6; + */ + java.lang.String getError(); + /** + *
+   * Output only. The error detail.
+   * 
+ * + * string error = 6; + */ + com.google.protobuf.ByteString + getErrorBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplate.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplate.java new file mode 100644 index 000000000000..5e593f0d72b3 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplate.java @@ -0,0 +1,2882 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * A Cloud Dataproc workflow template resource.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowTemplate} + */ +public final class WorkflowTemplate extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.WorkflowTemplate) + WorkflowTemplateOrBuilder { +private static final long serialVersionUID = 0L; + // Use WorkflowTemplate.newBuilder() to construct. + private WorkflowTemplate(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private WorkflowTemplate() { + id_ = ""; + name_ = ""; + version_ = 0; + jobs_ = java.util.Collections.emptyList(); + parameters_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WorkflowTemplate( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + id_ = s; + break; + } + case 24: { + + version_ = input.readInt32(); + break; + } + case 34: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (createTime_ != null) { + subBuilder = createTime_.toBuilder(); + } + createTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(createTime_); + createTime_ = subBuilder.buildPartial(); + } + + break; + } + case 
42: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (updateTime_ != null) { + subBuilder = updateTime_.toBuilder(); + } + updateTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(updateTime_); + updateTime_ = subBuilder.buildPartial(); + } + + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + labels_ = com.google.protobuf.MapField.newMapField( + LabelsDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000020; + } + com.google.protobuf.MapEntry + labels__ = input.readMessage( + LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + labels_.getMutableMap().put( + labels__.getKey(), labels__.getValue()); + break; + } + case 58: { + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder subBuilder = null; + if (placement_ != null) { + subBuilder = placement_.toBuilder(); + } + placement_ = input.readMessage(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(placement_); + placement_ = subBuilder.buildPartial(); + } + + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + jobs_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000080; + } + jobs_.add( + input.readMessage(com.google.cloud.dataproc.v1.OrderedJob.parser(), extensionRegistry)); + break; + } + case 74: { + if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000100; + } + parameters_.add( + input.readMessage(com.google.cloud.dataproc.v1.TemplateParameter.parser(), extensionRegistry)); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + jobs_ = java.util.Collections.unmodifiableList(jobs_); + } + if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = java.util.Collections.unmodifiableList(parameters_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplate_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 6: + return internalGetLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplate_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowTemplate.class, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder.class); + } + + private int bitField0_; + public static final int ID_FIELD_NUMBER = 2; + private volatile java.lang.Object id_; + /** + *
+   * Required. The template id.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string id = 2; + */ + public java.lang.String getId() { + java.lang.Object ref = id_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + id_ = s; + return s; + } + } + /** + *
+   * Required. The template id.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string id = 2; + */ + public com.google.protobuf.ByteString + getIdBytes() { + java.lang.Object ref = id_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + id_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Output only. The "resource name" of the template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Output only. The "resource name" of the template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VERSION_FIELD_NUMBER = 3; + private int version_; + /** + *
+   * Optional. Used to perform a consistent read-modify-write.
+   * This field should be left blank for a `CreateWorkflowTemplate` request. It
+   * is required for an `UpdateWorkflowTemplate` request, and must match the
+   * current server version. A typical update template flow would fetch the
+   * current template with a `GetWorkflowTemplate` request, which will return
+   * the current template with the `version` field filled in with the
+   * current server version. The user updates other fields in the template,
+   * then returns it as part of the `UpdateWorkflowTemplate` request.
+   * 
+ * + * int32 version = 3; + */ + public int getVersion() { + return version_; + } + + public static final int CREATE_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp createTime_; + /** + *
+   * Output only. The time template was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + *
+   * Output only. The time template was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + *
+   * Output only. The time template was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return getCreateTime(); + } + + public static final int UPDATE_TIME_FIELD_NUMBER = 5; + private com.google.protobuf.Timestamp updateTime_; + /** + *
+   * Output only. The time template was last updated.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public boolean hasUpdateTime() { + return updateTime_ != null; + } + /** + *
+   * Output only. The time template was last updated.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public com.google.protobuf.Timestamp getUpdateTime() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + /** + *
+   * Output only. The time template was last updated.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + return getUpdateTime(); + } + + public static final int LABELS_FIELD_NUMBER = 6; + private static final class LabelsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplate_LabelsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> labels_; + private com.google.protobuf.MapField + internalGetLabels() { + if (labels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + return labels_; + } + + public int getLabelsCount() { + return internalGetLabels().getMap().size(); + } + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + + public boolean containsLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabels().getMap().containsKey(key); + } + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabels() { + return getLabelsMap(); + } + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + + public java.util.Map getLabelsMap() { + return internalGetLabels().getMap(); + } + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + + public java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + + public java.lang.String getLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int PLACEMENT_FIELD_NUMBER = 7; + private com.google.cloud.dataproc.v1.WorkflowTemplatePlacement placement_; + /** + *
+   * Required. WorkflowTemplate scheduling information.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public boolean hasPlacement() { + return placement_ != null; + } + /** + *
+   * Required. WorkflowTemplate scheduling information.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getPlacement() { + return placement_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance() : placement_; + } + /** + *
+   * Required. WorkflowTemplate scheduling information.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder getPlacementOrBuilder() { + return getPlacement(); + } + + public static final int JOBS_FIELD_NUMBER = 8; + private java.util.List jobs_; + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public java.util.List getJobsList() { + return jobs_; + } + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public java.util.List + getJobsOrBuilderList() { + return jobs_; + } + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public int getJobsCount() { + return jobs_.size(); + } + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJob getJobs(int index) { + return jobs_.get(index); + } + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJobOrBuilder getJobsOrBuilder( + int index) { + return jobs_.get(index); + } + + public static final int PARAMETERS_FIELD_NUMBER = 9; + private java.util.List parameters_; + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public java.util.List getParametersList() { + return parameters_; + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public java.util.List + getParametersOrBuilderList() { + return parameters_; + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public int getParametersCount() { + return parameters_.size(); + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameter getParameters(int index) { + return parameters_.get(index); + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameterOrBuilder getParametersOrBuilder( + int index) { + return parameters_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, id_); + } + if (version_ != 0) { + output.writeInt32(3, version_); + } + if (createTime_ != null) { + output.writeMessage(4, getCreateTime()); + } + if (updateTime_ != null) { + output.writeMessage(5, getUpdateTime()); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetLabels(), + LabelsDefaultEntryHolder.defaultEntry, + 6); + if (placement_ != null) { + output.writeMessage(7, getPlacement()); + } + for (int i = 0; i < jobs_.size(); i++) { + output.writeMessage(8, jobs_.get(i)); + } + for (int i = 0; i < parameters_.size(); i++) { + output.writeMessage(9, parameters_.get(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (!getIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, id_); + } + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, version_); + } + if (createTime_ != null) 
{ + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getCreateTime()); + } + if (updateTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getUpdateTime()); + } + for (java.util.Map.Entry entry + : internalGetLabels().getMap().entrySet()) { + com.google.protobuf.MapEntry + labels__ = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, labels__); + } + if (placement_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, getPlacement()); + } + for (int i = 0; i < jobs_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, jobs_.get(i)); + } + for (int i = 0; i < parameters_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(9, parameters_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.WorkflowTemplate)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.WorkflowTemplate other = (com.google.cloud.dataproc.v1.WorkflowTemplate) obj; + + boolean result = true; + result = result && getId() + .equals(other.getId()); + result = result && getName() + .equals(other.getName()); + result = result && (getVersion() + == other.getVersion()); + result = result && (hasCreateTime() == other.hasCreateTime()); + if (hasCreateTime()) { + result = result && getCreateTime() + .equals(other.getCreateTime()); + } + result = result && (hasUpdateTime() == other.hasUpdateTime()); + if (hasUpdateTime()) { + result = result && getUpdateTime() + .equals(other.getUpdateTime()); + } + result = result && internalGetLabels().equals( + other.internalGetLabels()); + 
result = result && (hasPlacement() == other.hasPlacement()); + if (hasPlacement()) { + result = result && getPlacement() + .equals(other.getPlacement()); + } + result = result && getJobsList() + .equals(other.getJobsList()); + result = result && getParametersList() + .equals(other.getParametersList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + if (hasUpdateTime()) { + hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getUpdateTime().hashCode(); + } + if (!internalGetLabels().getMap().isEmpty()) { + hash = (37 * hash) + LABELS_FIELD_NUMBER; + hash = (53 * hash) + internalGetLabels().hashCode(); + } + if (hasPlacement()) { + hash = (37 * hash) + PLACEMENT_FIELD_NUMBER; + hash = (53 * hash) + getPlacement().hashCode(); + } + if (getJobsCount() > 0) { + hash = (37 * hash) + JOBS_FIELD_NUMBER; + hash = (53 * hash) + getJobsList().hashCode(); + } + if (getParametersCount() > 0) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + getParametersList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + 
java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate 
parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplate parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.WorkflowTemplate prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A Cloud Dataproc workflow template resource.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowTemplate} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.WorkflowTemplate) + com.google.cloud.dataproc.v1.WorkflowTemplateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplate_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 6: + return internalGetLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 6: + return internalGetMutableLabels(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplate_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowTemplate.class, com.google.cloud.dataproc.v1.WorkflowTemplate.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.WorkflowTemplate.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getJobsFieldBuilder(); + getParametersFieldBuilder(); + } + } + 
@java.lang.Override + public Builder clear() { + super.clear(); + id_ = ""; + + name_ = ""; + + version_ = 0; + + if (createTimeBuilder_ == null) { + createTime_ = null; + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + if (updateTimeBuilder_ == null) { + updateTime_ = null; + } else { + updateTime_ = null; + updateTimeBuilder_ = null; + } + internalGetMutableLabels().clear(); + if (placementBuilder_ == null) { + placement_ = null; + } else { + placement_ = null; + placementBuilder_ = null; + } + if (jobsBuilder_ == null) { + jobs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000080); + } else { + jobsBuilder_.clear(); + } + if (parametersBuilder_ == null) { + parameters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000100); + } else { + parametersBuilder_.clear(); + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplate_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplate getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplate build() { + com.google.cloud.dataproc.v1.WorkflowTemplate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplate buildPartial() { + com.google.cloud.dataproc.v1.WorkflowTemplate result = new com.google.cloud.dataproc.v1.WorkflowTemplate(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.id_ = id_; + result.name_ = name_; + result.version_ = version_; + if (createTimeBuilder_ == null) { + result.createTime_ = createTime_; + } else { + 
result.createTime_ = createTimeBuilder_.build(); + } + if (updateTimeBuilder_ == null) { + result.updateTime_ = updateTime_; + } else { + result.updateTime_ = updateTimeBuilder_.build(); + } + result.labels_ = internalGetLabels(); + result.labels_.makeImmutable(); + if (placementBuilder_ == null) { + result.placement_ = placement_; + } else { + result.placement_ = placementBuilder_.build(); + } + if (jobsBuilder_ == null) { + if (((bitField0_ & 0x00000080) == 0x00000080)) { + jobs_ = java.util.Collections.unmodifiableList(jobs_); + bitField0_ = (bitField0_ & ~0x00000080); + } + result.jobs_ = jobs_; + } else { + result.jobs_ = jobsBuilder_.build(); + } + if (parametersBuilder_ == null) { + if (((bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = java.util.Collections.unmodifiableList(parameters_); + bitField0_ = (bitField0_ & ~0x00000100); + } + result.parameters_ = parameters_; + } else { + result.parameters_ = parametersBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + 
return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.WorkflowTemplate) { + return mergeFrom((com.google.cloud.dataproc.v1.WorkflowTemplate)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.WorkflowTemplate other) { + if (other == com.google.cloud.dataproc.v1.WorkflowTemplate.getDefaultInstance()) return this; + if (!other.getId().isEmpty()) { + id_ = other.id_; + onChanged(); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.hasUpdateTime()) { + mergeUpdateTime(other.getUpdateTime()); + } + internalGetMutableLabels().mergeFrom( + other.internalGetLabels()); + if (other.hasPlacement()) { + mergePlacement(other.getPlacement()); + } + if (jobsBuilder_ == null) { + if (!other.jobs_.isEmpty()) { + if (jobs_.isEmpty()) { + jobs_ = other.jobs_; + bitField0_ = (bitField0_ & ~0x00000080); + } else { + ensureJobsIsMutable(); + jobs_.addAll(other.jobs_); + } + onChanged(); + } + } else { + if (!other.jobs_.isEmpty()) { + if (jobsBuilder_.isEmpty()) { + jobsBuilder_.dispose(); + jobsBuilder_ = null; + jobs_ = other.jobs_; + bitField0_ = (bitField0_ & ~0x00000080); + jobsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getJobsFieldBuilder() : null; + } else { + jobsBuilder_.addAllMessages(other.jobs_); + } + } + } + if (parametersBuilder_ == null) { + if (!other.parameters_.isEmpty()) { + if (parameters_.isEmpty()) { + parameters_ = other.parameters_; + bitField0_ = (bitField0_ & ~0x00000100); + } else { + ensureParametersIsMutable(); + parameters_.addAll(other.parameters_); + } + onChanged(); + } + } else { + if (!other.parameters_.isEmpty()) { + if (parametersBuilder_.isEmpty()) { + parametersBuilder_.dispose(); + parametersBuilder_ = null; + parameters_ = other.parameters_; + bitField0_ = (bitField0_ & ~0x00000100); + parametersBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getParametersFieldBuilder() : null; + } else { + parametersBuilder_.addAllMessages(other.parameters_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.WorkflowTemplate parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.WorkflowTemplate) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object id_ = ""; + /** + *
+     * Required. The template id.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string id = 2; + */ + public java.lang.String getId() { + java.lang.Object ref = id_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + id_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required. The template id.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string id = 2; + */ + public com.google.protobuf.ByteString + getIdBytes() { + java.lang.Object ref = id_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + id_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required. The template id.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string id = 2; + */ + public Builder setId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + id_ = value; + onChanged(); + return this; + } + /** + *
+     * Required. The template id.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string id = 2; + */ + public Builder clearId() { + + id_ = getDefaultInstance().getId(); + onChanged(); + return this; + } + /** + *
+     * Required. The template id.
+     * The id must contain only letters (a-z, A-Z), numbers (0-9),
+     * underscores (_), and hyphens (-). Cannot begin or end with underscore
+     * or hyphen. Must consist of between 3 and 50 characters.
+     * 
+ * + * string id = 2; + */ + public Builder setIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + id_ = value; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+     * Output only. The "resource name" of the template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The "resource name" of the template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The "resource name" of the template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The "resource name" of the template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Output only. The "resource name" of the template, as described
+     * in https://cloud.google.com/apis/design/resource_names of the form
+     * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+     * 
+ * + * string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private int version_ ; + /** + *
+     * Optional. Used to perform a consistent read-modify-write.
+     * This field should be left blank for a `CreateWorkflowTemplate` request. It
+     * is required for an `UpdateWorkflowTemplate` request, and must match the
+     * current server version. A typical update template flow would fetch the
+     * current template with a `GetWorkflowTemplate` request, which will return
+     * the current template with the `version` field filled in with the
+     * current server version. The user updates other fields in the template,
+     * then returns it as part of the `UpdateWorkflowTemplate` request.
+     * 
+ * + * int32 version = 3; + */ + public int getVersion() { + return version_; + } + /** + *
+     * Optional. Used to perform a consistent read-modify-write.
+     * This field should be left blank for a `CreateWorkflowTemplate` request. It
+     * is required for an `UpdateWorkflowTemplate` request, and must match the
+     * current server version. A typical update template flow would fetch the
+     * current template with a `GetWorkflowTemplate` request, which will return
+     * the current template with the `version` field filled in with the
+     * current server version. The user updates other fields in the template,
+     * then returns it as part of the `UpdateWorkflowTemplate` request.
+     * 
+ * + * int32 version = 3; + */ + public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. Used to perform a consistent read-modify-write.
+     * This field should be left blank for a `CreateWorkflowTemplate` request. It
+     * is required for an `UpdateWorkflowTemplate` request, and must match the
+     * current server version. A typical update template flow would fetch the
+     * current template with a `GetWorkflowTemplate` request, which will return
+     * the current template with the `version` field filled in with the
+     * current server version. The user updates other fields in the template,
+     * then returns it as part of the `UpdateWorkflowTemplate` request.
+     * 
+ * + * int32 version = 3; + */ + public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> createTimeBuilder_; + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public boolean hasCreateTime() { + return createTimeBuilder_ != null || createTime_ != null; + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + onChanged(); + } else { + createTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder setCreateTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + onChanged(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (createTime_ != null) { + createTime_ = + com.google.protobuf.Timestamp.newBuilder(createTime_).mergeFrom(value).buildPartial(); + } else { + createTime_ = value; + } + onChanged(); + } else { + createTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder clearCreateTime() { + if (createTimeBuilder_ == null) { + createTime_ = null; + onChanged(); + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + } + /** + *
+     * Output only. The time template was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), + getParentForChildren(), + isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp updateTime_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> updateTimeBuilder_; + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public boolean hasUpdateTime() { + return updateTimeBuilder_ != null || updateTime_ != null; + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public com.google.protobuf.Timestamp getUpdateTime() { + if (updateTimeBuilder_ == null) { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } else { + return updateTimeBuilder_.getMessage(); + } + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + updateTime_ = value; + onChanged(); + } else { + updateTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public Builder setUpdateTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (updateTimeBuilder_ == null) { + updateTime_ = builderForValue.build(); + onChanged(); + } else { + updateTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (updateTime_ != null) { + updateTime_ = + com.google.protobuf.Timestamp.newBuilder(updateTime_).mergeFrom(value).buildPartial(); + } else { + updateTime_ = value; + } + onChanged(); + } else { + updateTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public Builder clearUpdateTime() { + if (updateTimeBuilder_ == null) { + updateTime_ = null; + onChanged(); + } else { + updateTime_ = null; + updateTimeBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { + + onChanged(); + return getUpdateTimeFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + if (updateTimeBuilder_ != null) { + return updateTimeBuilder_.getMessageOrBuilder(); + } else { + return updateTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + } + /** + *
+     * Output only. The time template was last updated.
+     * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getUpdateTimeFieldBuilder() { + if (updateTimeBuilder_ == null) { + updateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getUpdateTime(), + getParentForChildren(), + isClean()); + updateTime_ = null; + } + return updateTimeBuilder_; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> labels_; + private com.google.protobuf.MapField + internalGetLabels() { + if (labels_ == null) { + return com.google.protobuf.MapField.emptyMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + return labels_; + } + private com.google.protobuf.MapField + internalGetMutableLabels() { + onChanged();; + if (labels_ == null) { + labels_ = com.google.protobuf.MapField.newMapField( + LabelsDefaultEntryHolder.defaultEntry); + } + if (!labels_.isMutable()) { + labels_ = labels_.copy(); + } + return labels_; + } + + public int getLabelsCount() { + return internalGetLabels().getMap().size(); + } + /** + *
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * 
+ * + * map<string, string> labels = 6; + */ + + public boolean containsLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetLabels().getMap().containsKey(key); + } + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getLabels() { + return getLabelsMap(); + } + /** + *
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * 
+     *
+     * <code>map&lt;string, string&gt; labels = 6;</code>
+     */
+    public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
+      return internalGetLabels().getMap();
+    }
+    /**
+     * <pre>
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * </pre>
+ * + * map<string, string> labels = 6; + */ + + public java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * 
+ * + * map<string, string> labels = 6; + */ + + public java.lang.String getLabelsOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetLabels().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearLabels() { + internalGetMutableLabels().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * 
+ * + * map<string, string> labels = 6; + */ + + public Builder removeLabels( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableLabels().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableLabels() { + return internalGetMutableLabels().getMutableMap(); + } + /** + *
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * 
+ * + * map<string, string> labels = 6; + */ + public Builder putLabels( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableLabels().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Optional. The labels to associate with this template. These labels
+     * will be propagated to all jobs and clusters created by the workflow
+     * instance.
+     * Label **keys** must contain 1 to 63 characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * Label **values** may be empty, but, if present, must contain 1 to 63
+     * characters, and must conform to
+     * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+     * No more than 32 labels can be associated with a template.
+     * 
+ * + * map<string, string> labels = 6; + */ + + public Builder putAllLabels( + java.util.Map values) { + internalGetMutableLabels().getMutableMap() + .putAll(values); + return this; + } + + private com.google.cloud.dataproc.v1.WorkflowTemplatePlacement placement_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement, com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder, com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder> placementBuilder_; + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public boolean hasPlacement() { + return placementBuilder_ != null || placement_ != null; + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getPlacement() { + if (placementBuilder_ == null) { + return placement_ == null ? com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance() : placement_; + } else { + return placementBuilder_.getMessage(); + } + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public Builder setPlacement(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement value) { + if (placementBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + placement_ = value; + onChanged(); + } else { + placementBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public Builder setPlacement( + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder builderForValue) { + if (placementBuilder_ == null) { + placement_ = builderForValue.build(); + onChanged(); + } else { + placementBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public Builder mergePlacement(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement value) { + if (placementBuilder_ == null) { + if (placement_ != null) { + placement_ = + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.newBuilder(placement_).mergeFrom(value).buildPartial(); + } else { + placement_ = value; + } + onChanged(); + } else { + placementBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public Builder clearPlacement() { + if (placementBuilder_ == null) { + placement_ = null; + onChanged(); + } else { + placement_ = null; + placementBuilder_ = null; + } + + return this; + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder getPlacementBuilder() { + + onChanged(); + return getPlacementFieldBuilder().getBuilder(); + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder getPlacementOrBuilder() { + if (placementBuilder_ != null) { + return placementBuilder_.getMessageOrBuilder(); + } else { + return placement_ == null ? + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance() : placement_; + } + } + /** + *
+     * Required. WorkflowTemplate scheduling information.
+     * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement, com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder, com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder> + getPlacementFieldBuilder() { + if (placementBuilder_ == null) { + placementBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement, com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder, com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder>( + getPlacement(), + getParentForChildren(), + isClean()); + placement_ = null; + } + return placementBuilder_; + } + + private java.util.List jobs_ = + java.util.Collections.emptyList(); + private void ensureJobsIsMutable() { + if (!((bitField0_ & 0x00000080) == 0x00000080)) { + jobs_ = new java.util.ArrayList(jobs_); + bitField0_ |= 0x00000080; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.OrderedJob, com.google.cloud.dataproc.v1.OrderedJob.Builder, com.google.cloud.dataproc.v1.OrderedJobOrBuilder> jobsBuilder_; + + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public java.util.List getJobsList() { + if (jobsBuilder_ == null) { + return java.util.Collections.unmodifiableList(jobs_); + } else { + return jobsBuilder_.getMessageList(); + } + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public int getJobsCount() { + if (jobsBuilder_ == null) { + return jobs_.size(); + } else { + return jobsBuilder_.getCount(); + } + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJob getJobs(int index) { + if (jobsBuilder_ == null) { + return jobs_.get(index); + } else { + return jobsBuilder_.getMessage(index); + } + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder setJobs( + int index, com.google.cloud.dataproc.v1.OrderedJob value) { + if (jobsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureJobsIsMutable(); + jobs_.set(index, value); + onChanged(); + } else { + jobsBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder setJobs( + int index, com.google.cloud.dataproc.v1.OrderedJob.Builder builderForValue) { + if (jobsBuilder_ == null) { + ensureJobsIsMutable(); + jobs_.set(index, builderForValue.build()); + onChanged(); + } else { + jobsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder addJobs(com.google.cloud.dataproc.v1.OrderedJob value) { + if (jobsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureJobsIsMutable(); + jobs_.add(value); + onChanged(); + } else { + jobsBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder addJobs( + int index, com.google.cloud.dataproc.v1.OrderedJob value) { + if (jobsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureJobsIsMutable(); + jobs_.add(index, value); + onChanged(); + } else { + jobsBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder addJobs( + com.google.cloud.dataproc.v1.OrderedJob.Builder builderForValue) { + if (jobsBuilder_ == null) { + ensureJobsIsMutable(); + jobs_.add(builderForValue.build()); + onChanged(); + } else { + jobsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder addJobs( + int index, com.google.cloud.dataproc.v1.OrderedJob.Builder builderForValue) { + if (jobsBuilder_ == null) { + ensureJobsIsMutable(); + jobs_.add(index, builderForValue.build()); + onChanged(); + } else { + jobsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder addAllJobs( + java.lang.Iterable values) { + if (jobsBuilder_ == null) { + ensureJobsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, jobs_); + onChanged(); + } else { + jobsBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder clearJobs() { + if (jobsBuilder_ == null) { + jobs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000080); + onChanged(); + } else { + jobsBuilder_.clear(); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public Builder removeJobs(int index) { + if (jobsBuilder_ == null) { + ensureJobsIsMutable(); + jobs_.remove(index); + onChanged(); + } else { + jobsBuilder_.remove(index); + } + return this; + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJob.Builder getJobsBuilder( + int index) { + return getJobsFieldBuilder().getBuilder(index); + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJobOrBuilder getJobsOrBuilder( + int index) { + if (jobsBuilder_ == null) { + return jobs_.get(index); } else { + return jobsBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public java.util.List + getJobsOrBuilderList() { + if (jobsBuilder_ != null) { + return jobsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(jobs_); + } + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJob.Builder addJobsBuilder() { + return getJobsFieldBuilder().addBuilder( + com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance()); + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public com.google.cloud.dataproc.v1.OrderedJob.Builder addJobsBuilder( + int index) { + return getJobsFieldBuilder().addBuilder( + index, com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance()); + } + /** + *
+     * Required. The Directed Acyclic Graph of Jobs to submit.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + public java.util.List + getJobsBuilderList() { + return getJobsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.OrderedJob, com.google.cloud.dataproc.v1.OrderedJob.Builder, com.google.cloud.dataproc.v1.OrderedJobOrBuilder> + getJobsFieldBuilder() { + if (jobsBuilder_ == null) { + jobsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.OrderedJob, com.google.cloud.dataproc.v1.OrderedJob.Builder, com.google.cloud.dataproc.v1.OrderedJobOrBuilder>( + jobs_, + ((bitField0_ & 0x00000080) == 0x00000080), + getParentForChildren(), + isClean()); + jobs_ = null; + } + return jobsBuilder_; + } + + private java.util.List parameters_ = + java.util.Collections.emptyList(); + private void ensureParametersIsMutable() { + if (!((bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = new java.util.ArrayList(parameters_); + bitField0_ |= 0x00000100; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.TemplateParameter, com.google.cloud.dataproc.v1.TemplateParameter.Builder, com.google.cloud.dataproc.v1.TemplateParameterOrBuilder> parametersBuilder_; + + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public java.util.List getParametersList() { + if (parametersBuilder_ == null) { + return java.util.Collections.unmodifiableList(parameters_); + } else { + return parametersBuilder_.getMessageList(); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public int getParametersCount() { + if (parametersBuilder_ == null) { + return parameters_.size(); + } else { + return parametersBuilder_.getCount(); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameter getParameters(int index) { + if (parametersBuilder_ == null) { + return parameters_.get(index); + } else { + return parametersBuilder_.getMessage(index); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder setParameters( + int index, com.google.cloud.dataproc.v1.TemplateParameter value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParametersIsMutable(); + parameters_.set(index, value); + onChanged(); + } else { + parametersBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder setParameters( + int index, com.google.cloud.dataproc.v1.TemplateParameter.Builder builderForValue) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.set(index, builderForValue.build()); + onChanged(); + } else { + parametersBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder addParameters(com.google.cloud.dataproc.v1.TemplateParameter value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParametersIsMutable(); + parameters_.add(value); + onChanged(); + } else { + parametersBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder addParameters( + int index, com.google.cloud.dataproc.v1.TemplateParameter value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParametersIsMutable(); + parameters_.add(index, value); + onChanged(); + } else { + parametersBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder addParameters( + com.google.cloud.dataproc.v1.TemplateParameter.Builder builderForValue) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.add(builderForValue.build()); + onChanged(); + } else { + parametersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder addParameters( + int index, com.google.cloud.dataproc.v1.TemplateParameter.Builder builderForValue) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.add(index, builderForValue.build()); + onChanged(); + } else { + parametersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder addAllParameters( + java.lang.Iterable values) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, parameters_); + onChanged(); + } else { + parametersBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder clearParameters() { + if (parametersBuilder_ == null) { + parameters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000100); + onChanged(); + } else { + parametersBuilder_.clear(); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public Builder removeParameters(int index) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.remove(index); + onChanged(); + } else { + parametersBuilder_.remove(index); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameter.Builder getParametersBuilder( + int index) { + return getParametersFieldBuilder().getBuilder(index); + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameterOrBuilder getParametersOrBuilder( + int index) { + if (parametersBuilder_ == null) { + return parameters_.get(index); } else { + return parametersBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public java.util.List + getParametersOrBuilderList() { + if (parametersBuilder_ != null) { + return parametersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(parameters_); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameter.Builder addParametersBuilder() { + return getParametersFieldBuilder().addBuilder( + com.google.cloud.dataproc.v1.TemplateParameter.getDefaultInstance()); + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1.TemplateParameter.Builder addParametersBuilder( + int index) { + return getParametersFieldBuilder().addBuilder( + index, com.google.cloud.dataproc.v1.TemplateParameter.getDefaultInstance()); + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + public java.util.List + getParametersBuilderList() { + return getParametersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.TemplateParameter, com.google.cloud.dataproc.v1.TemplateParameter.Builder, com.google.cloud.dataproc.v1.TemplateParameterOrBuilder> + getParametersFieldBuilder() { + if (parametersBuilder_ == null) { + parametersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1.TemplateParameter, com.google.cloud.dataproc.v1.TemplateParameter.Builder, com.google.cloud.dataproc.v1.TemplateParameterOrBuilder>( + parameters_, + ((bitField0_ & 0x00000100) == 0x00000100), + getParentForChildren(), + isClean()); + parameters_ = null; + } + return parametersBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.WorkflowTemplate) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowTemplate) + private static final com.google.cloud.dataproc.v1.WorkflowTemplate DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.WorkflowTemplate(); + } + + public static com.google.cloud.dataproc.v1.WorkflowTemplate getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public WorkflowTemplate parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return new WorkflowTemplate(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplate getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java new file mode 100644 index 000000000000..9396b0147340 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java @@ -0,0 +1,212 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.google.cloud.dataproc.v1; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import java.util.Map; +import java.util.ArrayList; +import java.util.List; + +// AUTO-GENERATED DOCUMENTATION AND CLASS +@javax.annotation.Generated("by GAPIC protoc plugin") +public class WorkflowTemplateName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding("projects/{project}/regions/{region}/workflowTemplates/{workflow_template}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String region; + private final String workflowTemplate; + + public String getProject() { + return project; + } + + public String getRegion() { + return region; + } + + public String getWorkflowTemplate() { + return workflowTemplate; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private WorkflowTemplateName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + region = Preconditions.checkNotNull(builder.getRegion()); + workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate()); + } + + public static WorkflowTemplateName of(String project, String region, String workflowTemplate) { + return newBuilder() + .setProject(project) + .setRegion(region) + .setWorkflowTemplate(workflowTemplate) + .build(); + } + + public static String format(String project, String region, String workflowTemplate) { + return newBuilder() + .setProject(project) + .setRegion(region) + .setWorkflowTemplate(workflowTemplate) + .build() + .toString(); + } + + public static WorkflowTemplateName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + 
PATH_TEMPLATE.validatedMatch(formattedString, "WorkflowTemplateName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("region"), matchMap.get("workflow_template")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList(values.size()); + for (WorkflowTemplateName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("region", region); + fieldMapBuilder.put("workflowTemplate", workflowTemplate); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PATH_TEMPLATE.instantiate("project", project, "region", region, "workflow_template", workflowTemplate); + } + + /** Builder for WorkflowTemplateName. 
*/ + public static class Builder { + + private String project; + private String region; + private String workflowTemplate; + + public String getProject() { + return project; + } + + public String getRegion() { + return region; + } + + public String getWorkflowTemplate() { + return workflowTemplate; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setRegion(String region) { + this.region = region; + return this; + } + + public Builder setWorkflowTemplate(String workflowTemplate) { + this.workflowTemplate = workflowTemplate; + return this; + } + + private Builder() { + } + + private Builder(WorkflowTemplateName workflowTemplateName) { + project = workflowTemplateName.project; + region = workflowTemplateName.region; + workflowTemplate = workflowTemplateName.workflowTemplate; + } + + public WorkflowTemplateName build() { + return new WorkflowTemplateName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof WorkflowTemplateName) { + WorkflowTemplateName that = (WorkflowTemplateName) o; + return (this.project.equals(that.project)) + && (this.region.equals(that.region)) + && (this.workflowTemplate.equals(that.workflowTemplate)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= region.hashCode(); + h *= 1000003; + h ^= workflowTemplate.hashCode(); + return h; + } +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateOrBuilder.java new file mode 100644 index 000000000000..6ef2a183f296 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateOrBuilder.java @@ -0,0 +1,338 @@ +// Generated by 
the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface WorkflowTemplateOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.WorkflowTemplate) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. The template id.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string id = 2; + */ + java.lang.String getId(); + /** + *
+   * Required. The template id.
+   * The id must contain only letters (a-z, A-Z), numbers (0-9),
+   * underscores (_), and hyphens (-). Cannot begin or end with underscore
+   * or hyphen. Must consist of between 3 and 50 characters.
+   * 
+ * + * string id = 2; + */ + com.google.protobuf.ByteString + getIdBytes(); + + /** + *
+   * Output only. The "resource name" of the template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Output only. The "resource name" of the template, as described
+   * in https://cloud.google.com/apis/design/resource_names of the form
+   * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   * 
+ * + * string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Optional. Used to perform a consistent read-modify-write.
+   * This field should be left blank for a `CreateWorkflowTemplate` request. It
+   * is required for an `UpdateWorkflowTemplate` request, and must match the
+   * current server version. A typical update template flow would fetch the
+   * current template with a `GetWorkflowTemplate` request, which will return
+   * the current template with the `version` field filled in with the
+   * current server version. The user updates other fields in the template,
+   * then returns it as part of the `UpdateWorkflowTemplate` request.
+   * 
+ * + * int32 version = 3; + */ + int getVersion(); + + /** + *
+   * Output only. The time template was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + boolean hasCreateTime(); + /** + *
+   * Output only. The time template was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + *
+   * Output only. The time template was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + *
+   * Output only. The time template was last updated.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + boolean hasUpdateTime(); + /** + *
+   * Output only. The time template was last updated.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + com.google.protobuf.Timestamp getUpdateTime(); + /** + *
+   * Output only. The time template was last updated.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 5; + */ + com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); + + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + int getLabelsCount(); + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + boolean containsLabels( + java.lang.String key); + /** + * Use {@link #getLabelsMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getLabels(); + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + java.util.Map + getLabelsMap(); + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + + java.lang.String getLabelsOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Optional. The labels to associate with this template. These labels
+   * will be propagated to all jobs and clusters created by the workflow
+   * instance.
+   * Label **keys** must contain 1 to 63 characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * Label **values** may be empty, but, if present, must contain 1 to 63
+   * characters, and must conform to
+   * [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+   * No more than 32 labels can be associated with a template.
+   * 
+ * + * map<string, string> labels = 6; + */ + + java.lang.String getLabelsOrThrow( + java.lang.String key); + + /** + *
+   * Required. WorkflowTemplate scheduling information.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + boolean hasPlacement(); + /** + *
+   * Required. WorkflowTemplate scheduling information.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getPlacement(); + /** + *
+   * Required. WorkflowTemplate scheduling information.
+   * 
+ * + * .google.cloud.dataproc.v1.WorkflowTemplatePlacement placement = 7; + */ + com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder getPlacementOrBuilder(); + + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + java.util.List + getJobsList(); + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + com.google.cloud.dataproc.v1.OrderedJob getJobs(int index); + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + int getJobsCount(); + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + java.util.List + getJobsOrBuilderList(); + /** + *
+   * Required. The Directed Acyclic Graph of Jobs to submit.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.OrderedJob jobs = 8; + */ + com.google.cloud.dataproc.v1.OrderedJobOrBuilder getJobsOrBuilder( + int index); + + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + java.util.List + getParametersList(); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + com.google.cloud.dataproc.v1.TemplateParameter getParameters(int index); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + int getParametersCount(); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + java.util.List + getParametersOrBuilderList(); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1.TemplateParameter parameters = 9; + */ + com.google.cloud.dataproc.v1.TemplateParameterOrBuilder getParametersOrBuilder( + int index); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacement.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacement.java new file mode 100644 index 000000000000..d3465547d3b5 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacement.java @@ -0,0 +1,1020 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +/** + *
+ * Specifies workflow execution target.
+ * Either `managed_cluster` or `cluster_selector` is required.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowTemplatePlacement} + */ +public final class WorkflowTemplatePlacement extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.WorkflowTemplatePlacement) + WorkflowTemplatePlacementOrBuilder { +private static final long serialVersionUID = 0L; + // Use WorkflowTemplatePlacement.newBuilder() to construct. + private WorkflowTemplatePlacement(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private WorkflowTemplatePlacement() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private WorkflowTemplatePlacement( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.cloud.dataproc.v1.ManagedCluster.Builder subBuilder = null; + if (placementCase_ == 1) { + subBuilder = ((com.google.cloud.dataproc.v1.ManagedCluster) placement_).toBuilder(); + } + placement_ = + input.readMessage(com.google.cloud.dataproc.v1.ManagedCluster.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.ManagedCluster) placement_); + placement_ = subBuilder.buildPartial(); + } + placementCase_ = 1; + break; + } + case 18: { + com.google.cloud.dataproc.v1.ClusterSelector.Builder subBuilder = null; + if (placementCase_ == 2) { + subBuilder = ((com.google.cloud.dataproc.v1.ClusterSelector) 
placement_).toBuilder(); + } + placement_ = + input.readMessage(com.google.cloud.dataproc.v1.ClusterSelector.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1.ClusterSelector) placement_); + placement_ = subBuilder.buildPartial(); + } + placementCase_ = 2; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.class, com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder.class); + } + + private int placementCase_ = 0; + private java.lang.Object placement_; + public enum PlacementCase + implements com.google.protobuf.Internal.EnumLite { + MANAGED_CLUSTER(1), + CLUSTER_SELECTOR(2), + PLACEMENT_NOT_SET(0); + private final int value; + private PlacementCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static PlacementCase valueOf(int value) { + return forNumber(value); + } + + public static PlacementCase forNumber(int value) { + switch (value) { + case 1: return MANAGED_CLUSTER; + case 2: return CLUSTER_SELECTOR; + case 0: return PLACEMENT_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public PlacementCase + getPlacementCase() { + return PlacementCase.forNumber( + placementCase_); + } + + public static final int MANAGED_CLUSTER_FIELD_NUMBER = 1; + /** + *
+   * Optional. A cluster that is managed by the workflow.
+   * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public boolean hasManagedCluster() { + return placementCase_ == 1; + } + /** + *
+   * Optional. A cluster that is managed by the workflow.
+   * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public com.google.cloud.dataproc.v1.ManagedCluster getManagedCluster() { + if (placementCase_ == 1) { + return (com.google.cloud.dataproc.v1.ManagedCluster) placement_; + } + return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } + /** + *
+   * Optional. A cluster that is managed by the workflow.
+   * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public com.google.cloud.dataproc.v1.ManagedClusterOrBuilder getManagedClusterOrBuilder() { + if (placementCase_ == 1) { + return (com.google.cloud.dataproc.v1.ManagedCluster) placement_; + } + return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } + + public static final int CLUSTER_SELECTOR_FIELD_NUMBER = 2; + /** + *
+   * Optional. A selector that chooses target cluster for jobs based
+   * on metadata.
+   * The selector is evaluated at the time each job is submitted.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public boolean hasClusterSelector() { + return placementCase_ == 2; + } + /** + *
+   * Optional. A selector that chooses target cluster for jobs based
+   * on metadata.
+   * The selector is evaluated at the time each job is submitted.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public com.google.cloud.dataproc.v1.ClusterSelector getClusterSelector() { + if (placementCase_ == 2) { + return (com.google.cloud.dataproc.v1.ClusterSelector) placement_; + } + return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } + /** + *
+   * Optional. A selector that chooses target cluster for jobs based
+   * on metadata.
+   * The selector is evaluated at the time each job is submitted.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder getClusterSelectorOrBuilder() { + if (placementCase_ == 2) { + return (com.google.cloud.dataproc.v1.ClusterSelector) placement_; + } + return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (placementCase_ == 1) { + output.writeMessage(1, (com.google.cloud.dataproc.v1.ManagedCluster) placement_); + } + if (placementCase_ == 2) { + output.writeMessage(2, (com.google.cloud.dataproc.v1.ClusterSelector) placement_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (placementCase_ == 1) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, (com.google.cloud.dataproc.v1.ManagedCluster) placement_); + } + if (placementCase_ == 2) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, (com.google.cloud.dataproc.v1.ClusterSelector) placement_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1.WorkflowTemplatePlacement)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement other = (com.google.cloud.dataproc.v1.WorkflowTemplatePlacement) obj; + + boolean result = true; + result = result && 
getPlacementCase().equals( + other.getPlacementCase()); + if (!result) return false; + switch (placementCase_) { + case 1: + result = result && getManagedCluster() + .equals(other.getManagedCluster()); + break; + case 2: + result = result && getClusterSelector() + .equals(other.getClusterSelector()); + break; + case 0: + default: + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + switch (placementCase_) { + case 1: + hash = (37 * hash) + MANAGED_CLUSTER_FIELD_NUMBER; + hash = (53 * hash) + getManagedCluster().hashCode(); + break; + case 2: + hash = (37 * hash) + CLUSTER_SELECTOR_FIELD_NUMBER; + hash = (53 * hash) + getClusterSelector().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Specifies workflow execution target.
+   * Either `managed_cluster` or `cluster_selector` is required.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1.WorkflowTemplatePlacement} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.WorkflowTemplatePlacement) + com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.class, com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + placementCase_ = 0; + placement_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement build() { + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement buildPartial() { + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement result = new com.google.cloud.dataproc.v1.WorkflowTemplatePlacement(this); + if (placementCase_ == 1) { + if (managedClusterBuilder_ == null) { + result.placement_ = placement_; + } else { + result.placement_ = managedClusterBuilder_.build(); + } + } + if (placementCase_ == 2) { + if (clusterSelectorBuilder_ == null) { + result.placement_ = placement_; + } else { + result.placement_ = clusterSelectorBuilder_.build(); + } + } + result.placementCase_ = placementCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return 
(Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1.WorkflowTemplatePlacement) { + return mergeFrom((com.google.cloud.dataproc.v1.WorkflowTemplatePlacement)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement other) { + if (other == com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance()) return this; + switch (other.getPlacementCase()) { + case MANAGED_CLUSTER: { + mergeManagedCluster(other.getManagedCluster()); + break; + } + case CLUSTER_SELECTOR: { + mergeClusterSelector(other.getClusterSelector()); + break; + } + case PLACEMENT_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1.WorkflowTemplatePlacement) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int placementCase_ = 0; + private java.lang.Object placement_; + public PlacementCase + getPlacementCase() { + return PlacementCase.forNumber( + placementCase_); + } + + public Builder clearPlacement() { + placementCase_ = 0; + placement_ = null; + onChanged(); + return this; + } + + + private com.google.protobuf.SingleFieldBuilderV3< + 
com.google.cloud.dataproc.v1.ManagedCluster, com.google.cloud.dataproc.v1.ManagedCluster.Builder, com.google.cloud.dataproc.v1.ManagedClusterOrBuilder> managedClusterBuilder_; + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public boolean hasManagedCluster() { + return placementCase_ == 1; + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public com.google.cloud.dataproc.v1.ManagedCluster getManagedCluster() { + if (managedClusterBuilder_ == null) { + if (placementCase_ == 1) { + return (com.google.cloud.dataproc.v1.ManagedCluster) placement_; + } + return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } else { + if (placementCase_ == 1) { + return managedClusterBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public Builder setManagedCluster(com.google.cloud.dataproc.v1.ManagedCluster value) { + if (managedClusterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + placement_ = value; + onChanged(); + } else { + managedClusterBuilder_.setMessage(value); + } + placementCase_ = 1; + return this; + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public Builder setManagedCluster( + com.google.cloud.dataproc.v1.ManagedCluster.Builder builderForValue) { + if (managedClusterBuilder_ == null) { + placement_ = builderForValue.build(); + onChanged(); + } else { + managedClusterBuilder_.setMessage(builderForValue.build()); + } + placementCase_ = 1; + return this; + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public Builder mergeManagedCluster(com.google.cloud.dataproc.v1.ManagedCluster value) { + if (managedClusterBuilder_ == null) { + if (placementCase_ == 1 && + placement_ != com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance()) { + placement_ = com.google.cloud.dataproc.v1.ManagedCluster.newBuilder((com.google.cloud.dataproc.v1.ManagedCluster) placement_) + .mergeFrom(value).buildPartial(); + } else { + placement_ = value; + } + onChanged(); + } else { + if (placementCase_ == 1) { + managedClusterBuilder_.mergeFrom(value); + } + managedClusterBuilder_.setMessage(value); + } + placementCase_ = 1; + return this; + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public Builder clearManagedCluster() { + if (managedClusterBuilder_ == null) { + if (placementCase_ == 1) { + placementCase_ = 0; + placement_ = null; + onChanged(); + } + } else { + if (placementCase_ == 1) { + placementCase_ = 0; + placement_ = null; + } + managedClusterBuilder_.clear(); + } + return this; + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public com.google.cloud.dataproc.v1.ManagedCluster.Builder getManagedClusterBuilder() { + return getManagedClusterFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + public com.google.cloud.dataproc.v1.ManagedClusterOrBuilder getManagedClusterOrBuilder() { + if ((placementCase_ == 1) && (managedClusterBuilder_ != null)) { + return managedClusterBuilder_.getMessageOrBuilder(); + } else { + if (placementCase_ == 1) { + return (com.google.cloud.dataproc.v1.ManagedCluster) placement_; + } + return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } + } + /** + *
+     * Optional. A cluster that is managed by the workflow.
+     * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ManagedCluster, com.google.cloud.dataproc.v1.ManagedCluster.Builder, com.google.cloud.dataproc.v1.ManagedClusterOrBuilder> + getManagedClusterFieldBuilder() { + if (managedClusterBuilder_ == null) { + if (!(placementCase_ == 1)) { + placement_ = com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance(); + } + managedClusterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ManagedCluster, com.google.cloud.dataproc.v1.ManagedCluster.Builder, com.google.cloud.dataproc.v1.ManagedClusterOrBuilder>( + (com.google.cloud.dataproc.v1.ManagedCluster) placement_, + getParentForChildren(), + isClean()); + placement_ = null; + } + placementCase_ = 1; + onChanged();; + return managedClusterBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterSelector, com.google.cloud.dataproc.v1.ClusterSelector.Builder, com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder> clusterSelectorBuilder_; + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public boolean hasClusterSelector() { + return placementCase_ == 2; + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public com.google.cloud.dataproc.v1.ClusterSelector getClusterSelector() { + if (clusterSelectorBuilder_ == null) { + if (placementCase_ == 2) { + return (com.google.cloud.dataproc.v1.ClusterSelector) placement_; + } + return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } else { + if (placementCase_ == 2) { + return clusterSelectorBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public Builder setClusterSelector(com.google.cloud.dataproc.v1.ClusterSelector value) { + if (clusterSelectorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + placement_ = value; + onChanged(); + } else { + clusterSelectorBuilder_.setMessage(value); + } + placementCase_ = 2; + return this; + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public Builder setClusterSelector( + com.google.cloud.dataproc.v1.ClusterSelector.Builder builderForValue) { + if (clusterSelectorBuilder_ == null) { + placement_ = builderForValue.build(); + onChanged(); + } else { + clusterSelectorBuilder_.setMessage(builderForValue.build()); + } + placementCase_ = 2; + return this; + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public Builder mergeClusterSelector(com.google.cloud.dataproc.v1.ClusterSelector value) { + if (clusterSelectorBuilder_ == null) { + if (placementCase_ == 2 && + placement_ != com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance()) { + placement_ = com.google.cloud.dataproc.v1.ClusterSelector.newBuilder((com.google.cloud.dataproc.v1.ClusterSelector) placement_) + .mergeFrom(value).buildPartial(); + } else { + placement_ = value; + } + onChanged(); + } else { + if (placementCase_ == 2) { + clusterSelectorBuilder_.mergeFrom(value); + } + clusterSelectorBuilder_.setMessage(value); + } + placementCase_ = 2; + return this; + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public Builder clearClusterSelector() { + if (clusterSelectorBuilder_ == null) { + if (placementCase_ == 2) { + placementCase_ = 0; + placement_ = null; + onChanged(); + } + } else { + if (placementCase_ == 2) { + placementCase_ = 0; + placement_ = null; + } + clusterSelectorBuilder_.clear(); + } + return this; + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public com.google.cloud.dataproc.v1.ClusterSelector.Builder getClusterSelectorBuilder() { + return getClusterSelectorFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + public com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder getClusterSelectorOrBuilder() { + if ((placementCase_ == 2) && (clusterSelectorBuilder_ != null)) { + return clusterSelectorBuilder_.getMessageOrBuilder(); + } else { + if (placementCase_ == 2) { + return (com.google.cloud.dataproc.v1.ClusterSelector) placement_; + } + return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } + } + /** + *
+     * Optional. A selector that chooses target cluster for jobs based
+     * on metadata.
+     * The selector is evaluated at the time each job is submitted.
+     * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterSelector, com.google.cloud.dataproc.v1.ClusterSelector.Builder, com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder> + getClusterSelectorFieldBuilder() { + if (clusterSelectorBuilder_ == null) { + if (!(placementCase_ == 2)) { + placement_ = com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance(); + } + clusterSelectorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1.ClusterSelector, com.google.cloud.dataproc.v1.ClusterSelector.Builder, com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder>( + (com.google.cloud.dataproc.v1.ClusterSelector) placement_, + getParentForChildren(), + isClean()); + placement_ = null; + } + placementCase_ = 2; + onChanged();; + return clusterSelectorBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.WorkflowTemplatePlacement) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowTemplatePlacement) + private static final com.google.cloud.dataproc.v1.WorkflowTemplatePlacement DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.WorkflowTemplatePlacement(); + } + + public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public WorkflowTemplatePlacement parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new WorkflowTemplatePlacement(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacementOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacementOrBuilder.java new file mode 100644 index 000000000000..e4d25d9294fc --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacementOrBuilder.java @@ -0,0 +1,67 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public interface WorkflowTemplatePlacementOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1.WorkflowTemplatePlacement) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Optional. A cluster that is managed by the workflow.
+   * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + boolean hasManagedCluster(); + /** + *
+   * Optional. A cluster that is managed by the workflow.
+   * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + com.google.cloud.dataproc.v1.ManagedCluster getManagedCluster(); + /** + *
+   * Optional. A cluster that is managed by the workflow.
+   * 
+ * + * .google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1; + */ + com.google.cloud.dataproc.v1.ManagedClusterOrBuilder getManagedClusterOrBuilder(); + + /** + *
+   * Optional. A selector that chooses target cluster for jobs based
+   * on metadata.
+   * The selector is evaluated at the time each job is submitted.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + boolean hasClusterSelector(); + /** + *
+   * Optional. A selector that chooses target cluster for jobs based
+   * on metadata.
+   * The selector is evaluated at the time each job is submitted.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + com.google.cloud.dataproc.v1.ClusterSelector getClusterSelector(); + /** + *
+   * Optional. A selector that chooses target cluster for jobs based
+   * on metadata.
+   * The selector is evaluated at the time each job is submitted.
+   * 
+ * + * .google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2; + */ + com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder getClusterSelectorOrBuilder(); + + public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.PlacementCase getPlacementCase(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatesProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatesProto.java new file mode 100644 index 000000000000..5c2756f9a4c7 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatesProto.java @@ -0,0 +1,514 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package com.google.cloud.dataproc.v1; + +public final class WorkflowTemplatesProto { + private WorkflowTemplatesProto() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowTemplate_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowTemplate_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowTemplate_LabelsEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowTemplate_LabelsEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor; + 
static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ManagedCluster_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ManagedCluster_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ManagedCluster_LabelsEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ManagedCluster_LabelsEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ClusterSelector_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ClusterSelector_ClusterLabelsEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ClusterSelector_ClusterLabelsEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_OrderedJob_LabelsEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_OrderedJob_LabelsEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_google_cloud_dataproc_v1_TemplateParameter_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_TemplateParameter_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ParameterValidation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ParameterValidation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_RegexValidation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_RegexValidation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ValueValidation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ValueValidation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowMetadata_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowMetadata_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowMetadata_ParametersEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowMetadata_ParametersEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ClusterOperation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ClusterOperation_fieldAccessorTable; + static final 
com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowGraph_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowGraph_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_WorkflowNode_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_WorkflowNode_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_CreateWorkflowTemplateRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_GetWorkflowTemplateRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_InstantiateWorkflowTemplateRequest_ParametersEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_descriptor; + static final 
+ com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_InstantiateInlineWorkflowTemplateRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_UpdateWorkflowTemplateRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_ListWorkflowTemplatesResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1_DeleteWorkflowTemplateRequest_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n1google/cloud/dataproc/v1/workflow_temp" + + "lates.proto\022\030google.cloud.dataproc.v1\032\034g" + + "oogle/api/annotations.proto\032\'google/clou" + + "d/dataproc/v1/clusters.proto\032#google/clo" + + "ud/dataproc/v1/jobs.proto\032#google/longru" + + "nning/operations.proto\032\033google/protobuf/" + + 
"empty.proto\032\037google/protobuf/timestamp.p" + + "roto\"\323\003\n\020WorkflowTemplate\022\n\n\002id\030\002 \001(\t\022\014\n" + + "\004name\030\001 \001(\t\022\017\n\007version\030\003 \001(\005\022/\n\013create_t" + + "ime\030\004 \001(\0132\032.google.protobuf.Timestamp\022/\n" + + "\013update_time\030\005 \001(\0132\032.google.protobuf.Tim" + + "estamp\022F\n\006labels\030\006 \003(\01326.google.cloud.da" + + "taproc.v1.WorkflowTemplate.LabelsEntry\022F" + + "\n\tplacement\030\007 \001(\01323.google.cloud.datapro" + + "c.v1.WorkflowTemplatePlacement\0222\n\004jobs\030\010" + + " \003(\0132$.google.cloud.dataproc.v1.OrderedJ" + + "ob\022?\n\nparameters\030\t \003(\0132+.google.cloud.da" + + "taproc.v1.TemplateParameter\032-\n\013LabelsEnt" + + "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\264\001\n\031W" + + "orkflowTemplatePlacement\022C\n\017managed_clus" + + "ter\030\001 \001(\0132(.google.cloud.dataproc.v1.Man" + + "agedClusterH\000\022E\n\020cluster_selector\030\002 \001(\0132" + + ").google.cloud.dataproc.v1.ClusterSelect" + + "orH\000B\013\n\tplacement\"\324\001\n\016ManagedCluster\022\024\n\014" + + "cluster_name\030\002 \001(\t\0227\n\006config\030\003 \001(\0132\'.goo" + + "gle.cloud.dataproc.v1.ClusterConfig\022D\n\006l" + + "abels\030\004 \003(\01324.google.cloud.dataproc.v1.M" + + "anagedCluster.LabelsEntry\032-\n\013LabelsEntry" + + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\253\001\n\017Clu" + + "sterSelector\022\014\n\004zone\030\001 \001(\t\022T\n\016cluster_la" + + "bels\030\002 \003(\0132<.google.cloud.dataproc.v1.Cl" + + "usterSelector.ClusterLabelsEntry\0324\n\022Clus" + + "terLabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001" + + "(\t:\0028\001\"\323\004\n\nOrderedJob\022\017\n\007step_id\030\001 \001(\t\0229" + + "\n\nhadoop_job\030\002 \001(\0132#.google.cloud.datapr" + + "oc.v1.HadoopJobH\000\0227\n\tspark_job\030\003 \001(\0132\".g" + 
+ "oogle.cloud.dataproc.v1.SparkJobH\000\022;\n\013py" + + "spark_job\030\004 \001(\0132$.google.cloud.dataproc." + + "v1.PySparkJobH\000\0225\n\010hive_job\030\005 \001(\0132!.goog" + + "le.cloud.dataproc.v1.HiveJobH\000\0223\n\007pig_jo" + + "b\030\006 \001(\0132 .google.cloud.dataproc.v1.PigJo" + + "bH\000\022>\n\rspark_sql_job\030\007 \001(\0132%.google.clou" + + "d.dataproc.v1.SparkSqlJobH\000\022@\n\006labels\030\010 " + + "\003(\01320.google.cloud.dataproc.v1.OrderedJo" + + "b.LabelsEntry\022;\n\nscheduling\030\t \001(\0132\'.goog" + + "le.cloud.dataproc.v1.JobScheduling\022\035\n\025pr" + + "erequisite_step_ids\030\n \003(\t\032-\n\013LabelsEntry" + + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\n\n\010job_" + + "type\"\211\001\n\021TemplateParameter\022\014\n\004name\030\001 \001(\t" + + "\022\016\n\006fields\030\002 \003(\t\022\023\n\013description\030\003 \001(\t\022A\n" + + "\nvalidation\030\004 \001(\0132-.google.cloud.datapro" + + "c.v1.ParameterValidation\"\241\001\n\023ParameterVa" + + "lidation\022:\n\005regex\030\001 \001(\0132).google.cloud.d" + + "ataproc.v1.RegexValidationH\000\022;\n\006values\030\002" + + " \001(\0132).google.cloud.dataproc.v1.ValueVal" + + "idationH\000B\021\n\017validation_type\"\"\n\017RegexVal" + + "idation\022\017\n\007regexes\030\001 \003(\t\"!\n\017ValueValidat" + + "ion\022\016\n\006values\030\001 \003(\t\"\375\004\n\020WorkflowMetadata" + + "\022\020\n\010template\030\001 \001(\t\022\017\n\007version\030\002 \001(\005\022B\n\016c" + + "reate_cluster\030\003 \001(\0132*.google.cloud.datap" + + "roc.v1.ClusterOperation\0226\n\005graph\030\004 \001(\0132\'" + + ".google.cloud.dataproc.v1.WorkflowGraph\022" + + "B\n\016delete_cluster\030\005 \001(\0132*.google.cloud.d" + + "ataproc.v1.ClusterOperation\022?\n\005state\030\006 \001" + + "(\01620.google.cloud.dataproc.v1.WorkflowMe" + + "tadata.State\022\024\n\014cluster_name\030\007 \001(\t\022N\n\npa" + + "rameters\030\010 
\003(\0132:.google.cloud.dataproc.v" + + "1.WorkflowMetadata.ParametersEntry\022.\n\nst" + + "art_time\030\t \001(\0132\032.google.protobuf.Timesta" + + "mp\022,\n\010end_time\030\n \001(\0132\032.google.protobuf.T" + + "imestamp\022\024\n\014cluster_uuid\030\013 \001(\t\0321\n\017Parame" + + "tersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028" + + "\001\"8\n\005State\022\013\n\007UNKNOWN\020\000\022\013\n\007PENDING\020\001\022\013\n\007" + + "RUNNING\020\002\022\010\n\004DONE\020\003\"E\n\020ClusterOperation\022" + + "\024\n\014operation_id\030\001 \001(\t\022\r\n\005error\030\002 \001(\t\022\014\n\004" + + "done\030\003 \001(\010\"F\n\rWorkflowGraph\0225\n\005nodes\030\001 \003" + + "(\0132&.google.cloud.dataproc.v1.WorkflowNo" + + "de\"\212\002\n\014WorkflowNode\022\017\n\007step_id\030\001 \001(\t\022\035\n\025" + + "prerequisite_step_ids\030\002 \003(\t\022\016\n\006job_id\030\003 " + + "\001(\t\022?\n\005state\030\005 \001(\01620.google.cloud.datapr" + + "oc.v1.WorkflowNode.NodeState\022\r\n\005error\030\006 " + + "\001(\t\"j\n\tNodeState\022\032\n\026NODE_STATE_UNSPECIFI" + + "ED\020\000\022\013\n\007BLOCKED\020\001\022\014\n\010RUNNABLE\020\002\022\013\n\007RUNNI" + + "NG\020\003\022\r\n\tCOMPLETED\020\004\022\n\n\006FAILED\020\005\"m\n\035Creat" + + "eWorkflowTemplateRequest\022\016\n\006parent\030\001 \001(\t" + + "\022<\n\010template\030\002 \001(\0132*.google.cloud.datapr" + + "oc.v1.WorkflowTemplate\";\n\032GetWorkflowTem" + + "plateRequest\022\014\n\004name\030\001 \001(\t\022\017\n\007version\030\002 " + + "\001(\005\"\354\001\n\"InstantiateWorkflowTemplateReque" + + "st\022\014\n\004name\030\001 \001(\t\022\017\n\007version\030\002 \001(\005\022\022\n\nreq" + + "uest_id\030\005 \001(\t\022`\n\nparameters\030\006 \003(\0132L.goog" + + "le.cloud.dataproc.v1.InstantiateWorkflow" + + "TemplateRequest.ParametersEntry\0321\n\017Param" + + "etersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\002" + 
+ "8\001\"\214\001\n(InstantiateInlineWorkflowTemplate" + + "Request\022\016\n\006parent\030\001 \001(\t\022<\n\010template\030\002 \001(" + + "\0132*.google.cloud.dataproc.v1.WorkflowTem" + + "plate\022\022\n\nrequest_id\030\003 \001(\t\"]\n\035UpdateWorkf" + + "lowTemplateRequest\022<\n\010template\030\001 \001(\0132*.g" + + "oogle.cloud.dataproc.v1.WorkflowTemplate" + + "\"U\n\034ListWorkflowTemplatesRequest\022\016\n\006pare" + + "nt\030\001 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_toke" + + "n\030\003 \001(\t\"w\n\035ListWorkflowTemplatesResponse" + + "\022=\n\ttemplates\030\001 \003(\0132*.google.cloud.datap" + + "roc.v1.WorkflowTemplate\022\027\n\017next_page_tok" + + "en\030\002 \001(\t\">\n\035DeleteWorkflowTemplateReques" + + "t\022\014\n\004name\030\001 \001(\t\022\017\n\007version\030\002 \001(\0052\342\016\n\027Wor" + + "kflowTemplateService\022\211\002\n\026CreateWorkflowT" + + "emplate\0227.google.cloud.dataproc.v1.Creat" + + "eWorkflowTemplateRequest\032*.google.cloud." 
+ + "dataproc.v1.WorkflowTemplate\"\211\001\202\323\344\223\002\202\001\"5" + + "/v1/{parent=projects/*/locations/*}/work" + + "flowTemplates:\010templateZ?\"3/v1/{parent=p" + + "rojects/*/regions/*}/workflowTemplates:\010" + + "template\022\355\001\n\023GetWorkflowTemplate\0224.googl" + + "e.cloud.dataproc.v1.GetWorkflowTemplateR" + + "equest\032*.google.cloud.dataproc.v1.Workfl" + + "owTemplate\"t\202\323\344\223\002n\0225/v1/{name=projects/*" + + "/locations/*/workflowTemplates/*}Z5\0223/v1" + + "/{name=projects/*/regions/*/workflowTemp" + + "lates/*}\022\220\002\n\033InstantiateWorkflowTemplate" + + "\022<.google.cloud.dataproc.v1.InstantiateW" + + "orkflowTemplateRequest\032\035.google.longrunn" + + "ing.Operation\"\223\001\202\323\344\223\002\214\001\"A/v1/{name=proje" + + "cts/*/locations/*/workflowTemplates/*}:i" + + "nstantiate:\001*ZD\"?/v1/{name=projects/*/re" + + "gions/*/workflowTemplates/*}:instantiate" + + ":\001*\022\266\002\n!InstantiateInlineWorkflowTemplat" + + "e\022B.google.cloud.dataproc.v1.Instantiate" + + "InlineWorkflowTemplateRequest\032\035.google.l" + + "ongrunning.Operation\"\255\001\202\323\344\223\002\246\001\"G/v1/{par" + + "ent=projects/*/locations/*}/workflowTemp" + + "lates:instantiateInline:\010templateZQ\"E/v1" + + "/{parent=projects/*/regions/*}/workflowT" + + "emplates:instantiateInline:\010template\022\233\002\n" + + "\026UpdateWorkflowTemplate\0227.google.cloud.d" + + "ataproc.v1.UpdateWorkflowTemplateRequest" + + "\032*.google.cloud.dataproc.v1.WorkflowTemp" + + "late\"\233\001\202\323\344\223\002\224\001\032>/v1/{template.name=proje" + + "cts/*/locations/*/workflowTemplates/*}:\010" + + "templateZH\032 labels = 8; - // Output-only. Cluster status. + // Output only. Cluster status. ClusterStatus status = 4; - // Output-only. The previous cluster status. + // Output only. The previous cluster status. repeated ClusterStatus status_history = 7; - // Output-only. A cluster UUID (Unique Universal Identifier). 
Cloud Dataproc + // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc // generates this value when it creates the cluster. string cluster_uuid = 6; @@ -106,7 +121,7 @@ message Cluster { // The cluster config. message ClusterConfig { - // Optional. A Google Cloud Storage staging bucket used for sharing generated + // Optional. A Cloud Storage staging bucket used for sharing generated // SSH keys and config. If you do not specify a staging bucket, Cloud // Dataproc will determine an appropriate Cloud Storage location (US, // ASIA, or EU) for your cluster's staging bucket according to the Google @@ -114,19 +129,19 @@ message ClusterConfig { // and manage this project-level, per-location bucket for you. string config_bucket = 1; - // Required. The shared Google Compute Engine config settings for + // Required. The shared Compute Engine config settings for // all instances in a cluster. GceClusterConfig gce_cluster_config = 8; - // Optional. The Google Compute Engine config settings for + // Optional. The Compute Engine config settings for // the master instance in a cluster. InstanceGroupConfig master_config = 9; - // Optional. The Google Compute Engine config settings for + // Optional. The Compute Engine config settings for // worker instances in a cluster. InstanceGroupConfig worker_config = 10; - // Optional. The Google Compute Engine config settings for + // Optional. The Compute Engine config settings for // additional worker instances in a cluster. InstanceGroupConfig secondary_worker_config = 12; @@ -145,12 +160,22 @@ message ClusterConfig { // ... worker specific actions ... // fi repeated NodeInitializationAction initialization_actions = 11; + + // Optional. Encryption settings for the cluster. + EncryptionConfig encryption_config = 15; } -// Common config settings for resources of Google Compute Engine cluster +// Encryption settings for the cluster. +message EncryptionConfig { + // Optional. 
The Cloud KMS key name to use for PD disk encryption for all + // instances in the cluster. + string gce_pd_kms_key_name = 1; +} + +// Common config settings for resources of Compute Engine cluster // instances, applicable to all instances in the cluster. message GceClusterConfig { - // Optional. The zone where the Google Compute Engine cluster will be located. + // Optional. The zone where the Compute Engine cluster will be located. // On a create request, it is required in the "global" region. If omitted // in a non-global Cloud Dataproc region, the service will pick a zone in the // corresponding Compute Engine region. On a get request, zone will @@ -163,7 +188,7 @@ message GceClusterConfig { // * `us-central1-f` string zone_uri = 1; - // Optional. The Google Compute Engine network to be used for machine + // Optional. The Compute Engine network to be used for machine // communications. Cannot be specified with subnetwork_uri. If neither // `network_uri` nor `subnetwork_uri` is specified, the "default" network of // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see @@ -176,7 +201,7 @@ message GceClusterConfig { // * `default` string network_uri = 2; - // Optional. The Google Compute Engine subnetwork to be used for machine + // Optional. The Compute Engine subnetwork to be used for machine // communications. Cannot be specified with network_uri. // // A full URL, partial URI, or short name are valid. Examples: @@ -195,8 +220,8 @@ message GceClusterConfig { bool internal_ip_only = 7; // Optional. The service account of the instances. Defaults to the default - // Google Compute Engine service account. Custom service accounts need - // permissions equivalent to the folloing IAM roles: + // Compute Engine service account. 
Custom service accounts need + // permissions equivalent to the following IAM roles: // // * roles/logging.logWriter // * roles/storage.objectAdmin @@ -206,7 +231,7 @@ message GceClusterConfig { // Example: `[account_id]@[project_id].iam.gserviceaccount.com` string service_account = 8; - // Optional. The URIs of service account scopes to be included in Google + // Optional. The URIs of service account scopes to be included in // Compute Engine instances. The following base set of scopes is always // included: // @@ -222,38 +247,43 @@ message GceClusterConfig { // * https://www.googleapis.com/auth/devstorage.full_control repeated string service_account_scopes = 3; - // The Google Compute Engine tags to add to all instances (see + // The Compute Engine tags to add to all instances (see // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). repeated string tags = 4; - // The Google Compute Engine metadata entries to add to all instances (see + // The Compute Engine metadata entries to add to all instances (see // [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). map metadata = 5; } -// Optional. The config settings for Google Compute Engine resources in +// Optional. The config settings for Compute Engine resources in // an instance group, such as a master or worker group. message InstanceGroupConfig { // Optional. The number of VM instances in the instance group. // For master instance groups, must be set to 1. int32 num_instances = 1; - // Optional. The list of instance names. Cloud Dataproc derives the names from - // `cluster_name`, `num_instances`, and the instance group if not set by user - // (recommended practice is to let Cloud Dataproc derive the name). + // Output only. The list of instance names. Cloud Dataproc derives the names + // from `cluster_name`, `num_instances`, and the instance group. repeated string instance_names = 2; - // Output-only. 
The Google Compute Engine image resource used for cluster - // instances. Inferred from `SoftwareConfig.image_version`. + // Optional. The Compute Engine image resource used for cluster + // instances. It can be specified or may be inferred from + // `SoftwareConfig.image_version`. string image_uri = 3; - // Optional. The Google Compute Engine machine type used for cluster instances. + // Optional. The Compute Engine machine type used for cluster instances. // // A full URL, partial URI, or short name are valid. Examples: // // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` // * `n1-standard-2` + // + // **Auto Zone Exception**: If you are using the Cloud Dataproc + // [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the machine type + // resource, for example, `n1-standard-2`. string machine_type_uri = 4; // Optional. Disk option config settings. @@ -262,12 +292,12 @@ message InstanceGroupConfig { // Optional. Specifies that this instance group contains preemptible instances. bool is_preemptible = 6; - // Output-only. The config for Google Compute Engine Instance Group + // Output only. The config for Compute Engine Instance Group // Manager that manages this group. // This is only used for preemptible instance groups. ManagedGroupConfig managed_group_config = 7; - // Optional. The Google Compute Engine accelerator configuration for these + // Optional. The Compute Engine accelerator configuration for these // instances. // // **Beta Feature**: This feature is still under development. It may be @@ -277,25 +307,31 @@ message InstanceGroupConfig { // Specifies the resources used to actively manage an instance group. message ManagedGroupConfig { - // Output-only. The name of the Instance Template used for the Managed + // Output only. 
The name of the Instance Template used for the Managed // Instance Group. string instance_template_name = 1; - // Output-only. The name of the Instance Group Manager for this group. + // Output only. The name of the Instance Group Manager for this group. string instance_group_manager_name = 2; } // Specifies the type and number of accelerator cards attached to the instances -// of an instance group (see [GPUs on Compute Engine](/compute/docs/gpus/)). +// of an instance. See [GPUs on Compute Engine](/compute/docs/gpus/). message AcceleratorConfig { // Full URL, partial URI, or short name of the accelerator type resource to - // expose to this instance. See [Google Compute Engine AcceleratorTypes]( - // /compute/docs/reference/beta/acceleratorTypes) + // expose to this instance. See + // [Compute Engine AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes). + // + // Examples: // - // Examples // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` // * `nvidia-tesla-k80` + // + // **Auto Zone Exception**: If you are using the Cloud Dataproc + // [Auto Zone Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the accelerator type + // resource, for example, `nvidia-tesla-k80`. string accelerator_type_uri = 1; // The number of the accelerator cards of this type exposed to this instance. @@ -304,6 +340,11 @@ message AcceleratorConfig { // Specifies the config of disk options for a group of VM instances. message DiskConfig { + // Optional. Type of the boot disk (default is "pd-standard"). + // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or + // "pd-standard" (Persistent Disk Hard Disk Drive). + string boot_disk_type = 3; + // Optional. Size in GB of the boot disk (default is 500GB). 
int32 boot_disk_size_gb = 1; @@ -319,7 +360,7 @@ message DiskConfig { // Specifies an executable to run on a fully configured node and a // timeout period for executable completion. message NodeInitializationAction { - // Required. Google Cloud Storage URI of executable file. + // Required. Cloud Storage URI of executable file. string executable_file = 1; // Optional. Amount of time executable has to complete. Default is @@ -352,7 +393,9 @@ message ClusterStatus { UPDATING = 5; } + // The cluster substate. enum Substate { + // The cluster substate is unknown. UNSPECIFIED = 0; // The cluster is known to be in an unhealthy state @@ -369,25 +412,27 @@ message ClusterStatus { STALE_STATUS = 2; } - // Output-only. The cluster's state. + // Output only. The cluster's state. State state = 1; - // Output-only. Optional details of cluster's state. + // Output only. Optional details of cluster's state. string detail = 2; - // Output-only. Time when this state was entered. + // Output only. Time when this state was entered. google.protobuf.Timestamp state_start_time = 3; - // Output-only. Additional state information that includes + // Output only. Additional state information that includes // status reported by the agent. Substate substate = 4; } // Specifies the selection and config of software inside the cluster. message SoftwareConfig { - // Optional. The version of software inside the cluster. It must match the - // regular expression `[0-9]+\.[0-9]+`. If unspecified, it defaults to the - // latest version (see [Cloud Dataproc Versioning](/dataproc/versioning)). + // Optional. The version of software inside the cluster. It must be one of the supported + // [Cloud Dataproc Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), + // such as "1.2" (including a subminor version, such as "1.2.29"), or the + // ["preview" version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). 
+ // If unspecified, it defaults to the latest version. string image_version = 1; // Optional. The properties to set on daemon config files. @@ -434,6 +479,19 @@ message CreateClusterRequest { // Required. The cluster to create. Cluster cluster = 2; + + // Optional. A unique id used to identify the request. If the server + // receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same + // id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend + // is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 4; } // A request to update a cluster. @@ -451,6 +509,16 @@ message UpdateClusterRequest { // Required. The changes to the cluster. Cluster cluster = 3; + // Optional. Timeout for graceful YARN decomissioning. Graceful + // decommissioning allows removing nodes from the cluster without + // interrupting jobs in progress. Timeout specifies how long to wait for jobs + // in progress to finish before forcefully removing nodes (and potentially + // interrupting jobs). Default timeout is 0 (for forceful decommission), and + // the maximum allowed timeout is 1 day. + // + // Only supported on Dataproc image versions 1.2 and higher. + google.protobuf.Duration graceful_decommission_timeout = 6; + // Required. Specifies the path, relative to `Cluster`, of // the field to update. For example, to change the number of workers // in a cluster to 5, the `update_mask` parameter would be @@ -499,6 +567,19 @@ message UpdateClusterRequest { // // google.protobuf.FieldMask update_mask = 4; + + // Optional. A unique id used to identify the request. 
If the server + // receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same + // id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the + // backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 7; } // A request to delete a cluster. @@ -512,6 +593,23 @@ message DeleteClusterRequest { // Required. The cluster name. string cluster_name = 2; + + // Optional. Specifying the `cluster_uuid` means the RPC should fail + // (with error NOT_FOUND) if cluster with specified UUID does not exist. + string cluster_uuid = 4; + + // Optional. A unique id used to identify the request. If the server + // receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same + // id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the + // backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 5; } // Request to get the resource representation for a cluster in a project. @@ -566,10 +664,10 @@ message ListClustersRequest { // The list of all clusters in a project. message ListClustersResponse { - // Output-only. The clusters in the project. + // Output only. The clusters in the project. repeated Cluster clusters = 1; - // Output-only. 
This token is included in the response if there are more + // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent `ListClustersRequest`. string next_page_token = 2; @@ -590,7 +688,7 @@ message DiagnoseClusterRequest { // The location of diagnostic output. message DiagnoseClusterResults { - // Output-only. The Google Cloud Storage URI of the diagnostic output. + // Output only. The Cloud Storage URI of the diagnostic output. // The output report is a plain text file with a summary of collected // diagnostics. string output_uri = 1; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/jobs.proto b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/jobs.proto index 0eadc084708c..5bf067e0648f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/jobs.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/jobs.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; @@ -31,22 +32,32 @@ option java_package = "com.google.cloud.dataproc.v1"; service JobController { // Submits a job to a cluster. 
rpc SubmitJob(SubmitJobRequest) returns (Job) { - option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" body: "*" }; + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" + body: "*" + }; } // Gets the resource representation for a job in a project. rpc GetJob(GetJobRequest) returns (Job) { - option (google.api.http) = { get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.http) = { + get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + }; } // Lists regions/{region}/jobs in a project. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { - option (google.api.http) = { get: "/v1/projects/{project_id}/regions/{region}/jobs" }; + option (google.api.http) = { + get: "/v1/projects/{project_id}/regions/{region}/jobs" + }; } // Updates a job in a project. rpc UpdateJob(UpdateJobRequest) returns (Job) { - option (google.api.http) = { patch: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" body: "job" }; + option (google.api.http) = { + patch: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + body: "job" + }; } // Starts a job cancellation request. To access the job resource @@ -54,13 +65,18 @@ service JobController { // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) or // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). rpc CancelJob(CancelJobRequest) returns (Job) { - option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" body: "*" }; + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" + body: "*" + }; } // Deletes the job from the project. If the job is active, the delete fails, // and the response returns `FAILED_PRECONDITION`. 
rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.http) = { + delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + }; } } @@ -362,7 +378,7 @@ message JobPlacement { // Required. The name of the cluster where the job will be submitted. string cluster_name = 1; - // Output-only. A cluster UUID generated by the Cloud Dataproc service when + // Output only. A cluster UUID generated by the Cloud Dataproc service when // the job is submitted. string cluster_uuid = 2; } @@ -407,7 +423,9 @@ message JobStatus { ATTEMPT_FAILURE = 9; } + // The job substate. enum Substate { + // The job substate is unknown. UNSPECIFIED = 0; // The Job is submitted to the agent. @@ -430,17 +448,17 @@ message JobStatus { STALE_STATUS = 3; } - // Output-only. A state message specifying the overall job state. + // Output only. A state message specifying the overall job state. State state = 1; - // Output-only. Optional job state details, such as an error + // Output only. Optional job state details, such as an error // description if the state is ERROR. string details = 2; - // Output-only. The time when this state was entered. + // Output only. The time when this state was entered. google.protobuf.Timestamp state_start_time = 6; - // Output-only. Additional state information, which includes + // Output only. Additional state information, which includes // status reported by the agent. Substate substate = 7; } @@ -545,25 +563,25 @@ message Job { SparkSqlJob spark_sql_job = 12; } - // Output-only. The job status. Additional application-specific + // Output only. The job status. Additional application-specific // status information may be contained in the type_job // and yarn_applications fields. JobStatus status = 8; - // Output-only. The previous job status. + // Output only. The previous job status. 
repeated JobStatus status_history = 13; - // Output-only. The collection of YARN applications spun up by this job. + // Output only. The collection of YARN applications spun up by this job. // // **Beta** Feature: This report is available for testing purposes only. It may // be changed before final release. repeated YarnApplication yarn_applications = 9; - // Output-only. A URI pointing to the location of the stdout of the job's + // Output only. A URI pointing to the location of the stdout of the job's // driver program. string driver_output_resource_uri = 17; - // Output-only. If present, the location of miscellaneous control files + // Output only. If present, the location of miscellaneous control files // which may be used as part of job setup and handling. If not present, // control files may be placed in the same location as `driver_output_uri`. string driver_control_files_uri = 15; @@ -578,12 +596,14 @@ message Job { // Optional. Job scheduling configuration. JobScheduling scheduling = 20; + + // Output only. A UUID that uniquely identifies a job within the project + // over time. This is in contrast to a user-settable reference.job_id that + // may be reused over time. + string job_uuid = 22; } // Job scheduling options. -// -// **Beta Feature**: These options are available for testing purposes only. -// They may be changed before final release. message JobScheduling { // Optional. Maximum number of times per hour a driver may be restarted as // a result of driver terminating with non-zero code before job is @@ -607,6 +627,19 @@ message SubmitJobRequest { // Required. The job resource. Job job = 2; + + // Optional. A unique id used to identify the request. If the server + // receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same + // id, then the second request will be ignored and the + // first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend + // is returned. 
+ // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 4; } // A request to get the resource representation for a job in a project. @@ -704,7 +737,7 @@ message UpdateJobRequest { // A list of jobs in a project. message ListJobsResponse { - // Output-only. Jobs list. + // Output only. Jobs list. repeated Job jobs = 1; // Optional. This token is included in the response if there are more results diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/operations.proto b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/operations.proto index aeca8c8b9cdf..ba3ab3be056f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/operations.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/operations.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,14 +11,13 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.dataproc.v1; import "google/api/annotations.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; @@ -44,42 +43,42 @@ message ClusterOperationStatus { DONE = 3; } - // Output-only. A message containing the operation state. + // Output only. 
A message containing the operation state. State state = 1; - // Output-only. A message containing the detailed operation state. + // Output only. A message containing the detailed operation state. string inner_state = 2; - // Output-only.A message containing any operation metadata details. + // Output only. A message containing any operation metadata details. string details = 3; - // Output-only. The time this state was entered. + // Output only. The time this state was entered. google.protobuf.Timestamp state_start_time = 4; } // Metadata describing the operation. message ClusterOperationMetadata { - // Output-only. Name of the cluster for the operation. + // Output only. Name of the cluster for the operation. string cluster_name = 7; - // Output-only. Cluster UUID for the operation. + // Output only. Cluster UUID for the operation. string cluster_uuid = 8; - // Output-only. Current operation status. + // Output only. Current operation status. ClusterOperationStatus status = 9; - // Output-only. The previous operation status. + // Output only. The previous operation status. repeated ClusterOperationStatus status_history = 10; - // Output-only. The operation type. + // Output only. The operation type. string operation_type = 11; - // Output-only. Short description of operation. + // Output only. Short description of operation. string description = 12; - // Output-only. Labels associated with the operation + // Output only. Labels associated with the operation map<string, string> labels = 13; - // Output-only. Errors encountered during operation execution. + // Output only. Errors encountered during operation execution. 
repeated string warnings = 14; } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/workflow_templates.proto b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/workflow_templates.proto new file mode 100644 index 000000000000..45ef8a22ea91 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1/src/main/proto/google/cloud/dataproc/v1/workflow_templates.proto @@ -0,0 +1,662 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/cloud/dataproc/v1/clusters.proto"; +import "google/cloud/dataproc/v1/jobs.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "WorkflowTemplatesProto"; +option java_package = "com.google.cloud.dataproc.v1"; + + +// The API interface for managing Workflow Templates in the +// Cloud Dataproc API. +service WorkflowTemplateService { + // Creates new workflow template. 
+ rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/workflowTemplates" + body: "template" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/workflowTemplates" + body: "template" + } + }; + } + + // Retrieves the latest workflow template. + // + // Can retrieve previously instantiated template by specifying optional + // version parameter. + rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" + additional_bindings { + get: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" + } + }; + } + + // Instantiates a template and begins execution. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" + body: "*" + additional_bindings { + post: "/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate" + body: "*" + } + }; + } + + // Instantiates a template and begins execution. 
+ // + // This method is equivalent to executing the sequence + // [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" + body: "template" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" + body: "template" + } + }; + } + + // Updates (replaces) workflow template. The updated template + // must contain version that matches the current server version. 
+ rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) { + option (google.api.http) = { + put: "/v1/{template.name=projects/*/locations/*/workflowTemplates/*}" + body: "template" + additional_bindings { + put: "/v1/{template.name=projects/*/regions/*/workflowTemplates/*}" + body: "template" + } + }; + } + + // Lists workflows that match the specified filter in the request. + rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/workflowTemplates" + additional_bindings { + get: "/v1/{parent=projects/*/regions/*}/workflowTemplates" + } + }; + } + + // Deletes a workflow template. It does not cancel in-progress workflows. + rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" + additional_bindings { + delete: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" + } + }; + } +} + +// A Cloud Dataproc workflow template resource. +message WorkflowTemplate { + // Required. The template id. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + string id = 2; + + // Output only. The "resource name" of the template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + string name = 1; + + // Optional. Used to perform a consistent read-modify-write. + // + // This field should be left blank for a `CreateWorkflowTemplate` request. It + // is required for an `UpdateWorkflowTemplate` request, and must match the + // current server version. 
A typical update template flow would fetch the + // current template with a `GetWorkflowTemplate` request, which will return + // the current template with the `version` field filled in with the + // current server version. The user updates other fields in the template, + // then returns it as part of the `UpdateWorkflowTemplate` request. + int32 version = 3; + + // Output only. The time template was created. + google.protobuf.Timestamp create_time = 4; + + // Output only. The time template was last updated. + google.protobuf.Timestamp update_time = 5; + + // Optional. The labels to associate with this template. These labels + // will be propagated to all jobs and clusters created by the workflow + // instance. + // + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // No more than 32 labels can be associated with a template. + map<string, string> labels = 6; + + // Required. WorkflowTemplate scheduling information. + WorkflowTemplatePlacement placement = 7; + + // Required. The Directed Acyclic Graph of Jobs to submit. + repeated OrderedJob jobs = 8; + + // Optional. Template parameters whose values are substituted into the + // template. Values for parameters must be provided when the template is + // instantiated. + repeated TemplateParameter parameters = 9; +} + +// Specifies workflow execution target. +// +// Either `managed_cluster` or `cluster_selector` is required. +message WorkflowTemplatePlacement { + // Required. Specifies where workflow executes; either on a managed + // cluster or an existing cluster chosen by labels. + oneof placement { + // Optional. A cluster that is managed by the workflow. + ManagedCluster managed_cluster = 1; + + // Optional. 
A selector that chooses target cluster for jobs based + // on metadata. + // + // The selector is evaluated at the time each job is submitted. + ClusterSelector cluster_selector = 2; + } +} + +// Cluster that is managed by the workflow. +message ManagedCluster { + // Required. The cluster name prefix. A unique cluster name will be formed by + // appending a random suffix. + // + // The name must contain only lower-case letters (a-z), numbers (0-9), + // and hyphens (-). Must begin with a letter. Cannot begin or end with + // hyphen. Must consist of between 2 and 35 characters. + string cluster_name = 2; + + // Required. The cluster configuration. + ClusterConfig config = 3; + + // Optional. The labels to associate with this cluster. + // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given cluster. + map<string, string> labels = 4; +} + +// A selector that chooses target cluster for jobs based on metadata. +message ClusterSelector { + // Optional. The zone where workflow process executes. This parameter does not + // affect the selection of the cluster. + // + // If unspecified, the zone of the first cluster matching the selector + // is used. + string zone = 1; + + // Required. The cluster labels. Cluster must have all labels + // to match. + map<string, string> cluster_labels = 2; +} + +// A job executed by the workflow. +message OrderedJob { + // Required. The step id. The id must be unique among all jobs + // within the template. + // + // The step id is used as prefix for job id, as job + // `goog-dataproc-workflow-step-id` label, and in + // [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other + // steps. 
+ // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + string step_id = 1; + + // Required. The job definition. + oneof job_type { + // Job is a Hadoop job. + HadoopJob hadoop_job = 2; + + // Job is a Spark job. + SparkJob spark_job = 3; + + // Job is a Pyspark job. + PySparkJob pyspark_job = 4; + + // Job is a Hive job. + HiveJob hive_job = 5; + + // Job is a Pig job. + PigJob pig_job = 6; + + // Job is a SparkSql job. + SparkSqlJob spark_sql_job = 7; + } + + // Optional. The labels to associate with this job. + // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given job. + map<string, string> labels = 8; + + // Optional. Job scheduling configuration. + JobScheduling scheduling = 9; + + // Optional. The optional list of prerequisite job step_ids. + // If not specified, the job will start at the beginning of workflow. + repeated string prerequisite_step_ids = 10; +} + +// A configurable parameter that replaces one or more fields in the template. +// Parameterizable fields: +// - Labels +// - File uris +// - Job properties +// - Job arguments +// - Script variables +// - Main class (in HadoopJob and SparkJob) +// - Zone (in ClusterSelector) +message TemplateParameter { + // Required. Parameter name. + // The parameter name is used as the key, and paired with the + // parameter value, which are passed to the template when the template + // is instantiated. + // The name must contain only capital letters (A-Z), numbers (0-9), and + // underscores (_), and must not start with a number. 
The maximum length is + // 40 characters. + string name = 1; + + // Required. Paths to all fields that the parameter replaces. + // A field is allowed to appear in at most one parameter's list of field + // paths. + // + // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. + // For example, a field path that references the zone field of a workflow + // template's cluster selector would be specified as + // `placement.clusterSelector.zone`. + // + // Also, field paths can reference fields using the following syntax: + // + // * Values in maps can be referenced by key: + // * labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * placement.managedCluster.labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * jobs['step-id'].labels['key'] + // + // * Jobs in the jobs list can be referenced by step-id: + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * jobs['step-id'].hiveJob.queryFileUri + // * jobs['step-id'].pySparkJob.mainPythonFileUri + // * jobs['step-id'].hadoopJob.jarFileUris[0] + // * jobs['step-id'].hadoopJob.archiveUris[0] + // * jobs['step-id'].hadoopJob.fileUris[0] + // * jobs['step-id'].pySparkJob.pythonFileUris[0] + // + // * Items in repeated fields can be referenced by a zero-based index: + // * jobs['step-id'].sparkJob.args[0] + // + // * Other examples: + // * jobs['step-id'].hadoopJob.properties['key'] + // * jobs['step-id'].hadoopJob.args[0] + // * jobs['step-id'].hiveJob.scriptVariables['key'] + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * placement.clusterSelector.zone + // + // It may not be possible to parameterize maps and repeated fields in their + // entirety since only individual map values and individual items in repeated + // fields can be referenced. For example, the following field paths are + // invalid: + // + // - placement.clusterSelector.clusterLabels + // - jobs['step-id'].sparkJob.args + repeated string fields = 2; + + // Optional. 
Brief description of the parameter. + // Must not exceed 1024 characters. + string description = 3; + + // Optional. Validation rules to be applied to this parameter's value. + ParameterValidation validation = 4; +} + +// Configuration for parameter validation. +message ParameterValidation { + // Required. The type of validation to be performed. + oneof validation_type { + // Validation based on regular expressions. + RegexValidation regex = 1; + + // Validation based on a list of allowed values. + ValueValidation values = 2; + } +} + +// Validation based on regular expressions. +message RegexValidation { + // Required. RE2 regular expressions used to validate the parameter's value. + // The value must match the regex in its entirety (substring + // matches are not sufficient). + repeated string regexes = 1; +} + +// Validation based on a list of allowed values. +message ValueValidation { + // Required. List of allowed values for the parameter. + repeated string values = 1; +} + +// A Cloud Dataproc workflow template resource. +message WorkflowMetadata { + // The operation state. + enum State { + // Unused. + UNKNOWN = 0; + + // The operation has been created. + PENDING = 1; + + // The operation is running. + RUNNING = 2; + + // The operation is done; either cancelled or completed. + DONE = 3; + } + + // Output only. The "resource name" of the template. + string template = 1; + + // Output only. The version of template at the time of + // workflow instantiation. + int32 version = 2; + + // Output only. The create cluster operation metadata. + ClusterOperation create_cluster = 3; + + // Output only. The workflow graph. + WorkflowGraph graph = 4; + + // Output only. The delete cluster operation metadata. + ClusterOperation delete_cluster = 5; + + // Output only. The workflow state. + State state = 6; + + // Output only. The name of the target cluster. + string cluster_name = 7; + + // Map from parameter names to values that were used for those parameters. 
+ map<string, string> parameters = 8; + + // Output only. Workflow start time. + google.protobuf.Timestamp start_time = 9; + + // Output only. Workflow end time. + google.protobuf.Timestamp end_time = 10; + + // Output only. The UUID of target cluster. + string cluster_uuid = 11; +} + +// The cluster operation triggered by a workflow. +message ClusterOperation { + // Output only. The id of the cluster operation. + string operation_id = 1; + + // Output only. Error, if operation failed. + string error = 2; + + // Output only. Indicates the operation is done. + bool done = 3; +} + +// The workflow graph. +message WorkflowGraph { + // Output only. The workflow nodes. + repeated WorkflowNode nodes = 1; +} + +// The workflow node. +message WorkflowNode { + // The workflow node state. + enum NodeState { + // State is unspecified. + NODE_STATE_UNSPECIFIED = 0; + + // The node is awaiting prerequisite node to finish. + BLOCKED = 1; + + // The node is runnable but not running. + RUNNABLE = 2; + + // The node is running. + RUNNING = 3; + + // The node completed successfully. + COMPLETED = 4; + + // The node failed. A node can be marked FAILED because + // its ancestor or peer failed. + FAILED = 5; + } + + // Output only. The name of the node. + string step_id = 1; + + // Output only. Node's prerequisite nodes. + repeated string prerequisite_step_ids = 2; + + // Output only. The job id; populated after the node enters RUNNING state. + string job_id = 3; + + // Output only. The node state. + NodeState state = 5; + + // Output only. The error detail. + string error = 6; +} + +// A request to create a workflow template. +message CreateWorkflowTemplateRequest { + // Required. The "resource name" of the region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + string parent = 1; + + // Required. The Dataproc workflow template to create. 
+ WorkflowTemplate template = 2; +} + +// A request to fetch a workflow template. +message GetWorkflowTemplateRequest { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + string name = 1; + + // Optional. The version of workflow template to retrieve. Only previously + // instatiated versions can be retrieved. + // + // If unspecified, retrieves the current version. + int32 version = 2; +} + +// A request to instantiate a workflow template. +message InstantiateWorkflowTemplateRequest { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + string name = 1; + + // Optional. The version of workflow template to instantiate. If specified, + // the workflow will be instantiated only if the current version of + // the workflow template has the supplied version. + // + // This option cannot be used to instantiate a previous version of + // workflow template. + int32 version = 2; + + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 5; + + // Optional. Map from parameter names to values that should be used for those + // parameters. Values may not exceed 100 characters. + map<string, string> parameters = 6; +} + +// A request to instantiate an inline workflow template. 
+message InstantiateInlineWorkflowTemplateRequest { + // Required. The "resource name" of the workflow template region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + string parent = 1; + + // Required. The workflow template to instantiate. + WorkflowTemplate template = 2; + + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 3; +} + +// A request to update a workflow template. +message UpdateWorkflowTemplateRequest { + // Required. The updated workflow template. + // + // The `template.version` field must match the current version. + WorkflowTemplate template = 1; +} + +// A request to list workflow templates in a project. +message ListWorkflowTemplatesRequest { + // Required. The "resource name" of the region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + string parent = 1; + + // Optional. The maximum number of results to return in each response. + int32 page_size = 2; + + // Optional. The page token, returned by a previous call, to request the + // next page of results. + string page_token = 3; +} + +// A response to a request to list workflow templates in a project. +message ListWorkflowTemplatesResponse { + // Output only. WorkflowTemplates list. + repeated WorkflowTemplate templates = 1; + + // Output only. This token is included in the response if there are more + // results to fetch. 
To fetch additional results, provide this value as the + // page_token in a subsequent ListWorkflowTemplatesRequest. + string next_page_token = 2; +} + +// A request to delete a workflow template. +// +// Currently started workflows will remain running. +message DeleteWorkflowTemplateRequest { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + string name = 1; + + // Optional. The version of workflow template to delete. If specified, + // will only delete the template if the current server version matches + // specified version. + int32 version = 2; +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Cluster.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Cluster.java index 35f46528be92..c3ed80a0433b 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Cluster.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Cluster.java @@ -548,7 +548,7 @@ public java.lang.String getClusterUuid() { private com.google.cloud.dataproc.v1beta2.ClusterMetrics metrics_; /** *
-   * Contains cluster daemon metrics such as HDFS and YARN stats.
+   * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
    * **Beta Feature**: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -560,7 +560,7 @@ public boolean hasMetrics() { } /** *
-   * Contains cluster daemon metrics such as HDFS and YARN stats.
+   * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
    * **Beta Feature**: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -572,7 +572,7 @@ public com.google.cloud.dataproc.v1beta2.ClusterMetrics getMetrics() { } /** *
-   * Contains cluster daemon metrics such as HDFS and YARN stats.
+   * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
    * **Beta Feature**: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -2218,7 +2218,7 @@ public Builder setClusterUuidBytes( com.google.cloud.dataproc.v1beta2.ClusterMetrics, com.google.cloud.dataproc.v1beta2.ClusterMetrics.Builder, com.google.cloud.dataproc.v1beta2.ClusterMetricsOrBuilder> metricsBuilder_; /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2230,7 +2230,7 @@ public boolean hasMetrics() { } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2246,7 +2246,7 @@ public com.google.cloud.dataproc.v1beta2.ClusterMetrics getMetrics() { } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2268,7 +2268,7 @@ public Builder setMetrics(com.google.cloud.dataproc.v1beta2.ClusterMetrics value } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2288,7 +2288,7 @@ public Builder setMetrics( } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2312,7 +2312,7 @@ public Builder mergeMetrics(com.google.cloud.dataproc.v1beta2.ClusterMetrics val } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2332,7 +2332,7 @@ public Builder clearMetrics() { } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2346,7 +2346,7 @@ public com.google.cloud.dataproc.v1beta2.ClusterMetrics.Builder getMetricsBuilde } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
@@ -2363,7 +2363,7 @@ public com.google.cloud.dataproc.v1beta2.ClusterMetricsOrBuilder getMetricsOrBui } /** *
-     * Contains cluster daemon metrics such as HDFS and YARN stats.
+     * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
      * **Beta Feature**: This report is available for testing purposes only. It may
      * be changed before final release.
      * 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java index 7bc95dfcbc9e..e5241df2c307 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java @@ -141,6 +141,19 @@ private ClusterConfig( break; } + case 122: { + com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder subBuilder = null; + if (encryptionConfig_ != null) { + subBuilder = encryptionConfig_.toBuilder(); + } + encryptionConfig_ = input.readMessage(com.google.cloud.dataproc.v1beta2.EncryptionConfig.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(encryptionConfig_); + encryptionConfig_ = subBuilder.buildPartial(); + } + + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -539,6 +552,39 @@ public com.google.cloud.dataproc.v1beta2.NodeInitializationActionOrBuilder getIn return initializationActions_.get(index); } + public static final int ENCRYPTION_CONFIG_FIELD_NUMBER = 15; + private com.google.cloud.dataproc.v1beta2.EncryptionConfig encryptionConfig_; + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public boolean hasEncryptionConfig() { + return encryptionConfig_ != null; + } + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1beta2.EncryptionConfig getEncryptionConfig() { + return encryptionConfig_ == null ? com.google.cloud.dataproc.v1beta2.EncryptionConfig.getDefaultInstance() : encryptionConfig_; + } + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder() { + return getEncryptionConfig(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -577,6 +623,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (lifecycleConfig_ != null) { output.writeMessage(14, getLifecycleConfig()); } + if (encryptionConfig_ != null) { + output.writeMessage(15, getEncryptionConfig()); + } unknownFields.writeTo(output); } @@ -617,6 +666,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(14, getLifecycleConfig()); } + if (encryptionConfig_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(15, getEncryptionConfig()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -667,6 +720,11 @@ public boolean equals(final java.lang.Object obj) { } result = result && getInitializationActionsList() .equals(other.getInitializationActionsList()); + result = result && (hasEncryptionConfig() == other.hasEncryptionConfig()); + if (hasEncryptionConfig()) { + result = result && getEncryptionConfig() + .equals(other.getEncryptionConfig()); + } result = result && unknownFields.equals(other.unknownFields); return result; } @@ -708,6 +766,10 @@ public int hashCode() { hash = (37 * hash) + INITIALIZATION_ACTIONS_FIELD_NUMBER; hash = (53 * hash) + getInitializationActionsList().hashCode(); } + if (hasEncryptionConfig()) { + hash = (37 * hash) + ENCRYPTION_CONFIG_FIELD_NUMBER; + hash = (53 * hash) + getEncryptionConfig().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -890,6 +952,12 @@ public Builder clear() { } else { initializationActionsBuilder_.clear(); } + if (encryptionConfigBuilder_ == null) { + encryptionConfig_ = null; + } else 
{ + encryptionConfig_ = null; + encryptionConfigBuilder_ = null; + } return this; } @@ -958,6 +1026,11 @@ public com.google.cloud.dataproc.v1beta2.ClusterConfig buildPartial() { } else { result.initializationActions_ = initializationActionsBuilder_.build(); } + if (encryptionConfigBuilder_ == null) { + result.encryptionConfig_ = encryptionConfig_; + } else { + result.encryptionConfig_ = encryptionConfigBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1055,6 +1128,9 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.ClusterConfig other) } } } + if (other.hasEncryptionConfig()) { + mergeEncryptionConfig(other.getEncryptionConfig()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2626,6 +2702,159 @@ public com.google.cloud.dataproc.v1beta2.NodeInitializationAction.Builder addIni } return initializationActionsBuilder_; } + + private com.google.cloud.dataproc.v1beta2.EncryptionConfig encryptionConfig_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.EncryptionConfig, com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder, com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder> encryptionConfigBuilder_; + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public boolean hasEncryptionConfig() { + return encryptionConfigBuilder_ != null || encryptionConfig_ != null; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1beta2.EncryptionConfig getEncryptionConfig() { + if (encryptionConfigBuilder_ == null) { + return encryptionConfig_ == null ? com.google.cloud.dataproc.v1beta2.EncryptionConfig.getDefaultInstance() : encryptionConfig_; + } else { + return encryptionConfigBuilder_.getMessage(); + } + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public Builder setEncryptionConfig(com.google.cloud.dataproc.v1beta2.EncryptionConfig value) { + if (encryptionConfigBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encryptionConfig_ = value; + onChanged(); + } else { + encryptionConfigBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public Builder setEncryptionConfig( + com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder builderForValue) { + if (encryptionConfigBuilder_ == null) { + encryptionConfig_ = builderForValue.build(); + onChanged(); + } else { + encryptionConfigBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public Builder mergeEncryptionConfig(com.google.cloud.dataproc.v1beta2.EncryptionConfig value) { + if (encryptionConfigBuilder_ == null) { + if (encryptionConfig_ != null) { + encryptionConfig_ = + com.google.cloud.dataproc.v1beta2.EncryptionConfig.newBuilder(encryptionConfig_).mergeFrom(value).buildPartial(); + } else { + encryptionConfig_ = value; + } + onChanged(); + } else { + encryptionConfigBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public Builder clearEncryptionConfig() { + if (encryptionConfigBuilder_ == null) { + encryptionConfig_ = null; + onChanged(); + } else { + encryptionConfig_ = null; + encryptionConfigBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder getEncryptionConfigBuilder() { + + onChanged(); + return getEncryptionConfigFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + public com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder() { + if (encryptionConfigBuilder_ != null) { + return encryptionConfigBuilder_.getMessageOrBuilder(); + } else { + return encryptionConfig_ == null ? + com.google.cloud.dataproc.v1beta2.EncryptionConfig.getDefaultInstance() : encryptionConfig_; + } + } + /** + *
+     * Optional. Encryption settings for the cluster.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.EncryptionConfig, com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder, com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder> + getEncryptionConfigFieldBuilder() { + if (encryptionConfigBuilder_ == null) { + encryptionConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.EncryptionConfig, com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder, com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder>( + getEncryptionConfig(), + getParentForChildren(), + isClean()); + encryptionConfig_ = null; + } + return encryptionConfigBuilder_; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java index f43a86d5cf11..0224e81538b8 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java @@ -285,4 +285,29 @@ public interface ClusterConfigOrBuilder extends */ com.google.cloud.dataproc.v1beta2.NodeInitializationActionOrBuilder getInitializationActionsOrBuilder( int index); + + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + boolean hasEncryptionConfig(); + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + com.google.cloud.dataproc.v1beta2.EncryptionConfig getEncryptionConfig(); + /** + *
+   * Optional. Encryption settings for the cluster.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.EncryptionConfig encryption_config = 15; + */ + com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOrBuilder.java index d4daae5608a7..afc7eb571d00 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOrBuilder.java @@ -243,7 +243,7 @@ com.google.cloud.dataproc.v1beta2.ClusterStatusOrBuilder getStatusHistoryOrBuild /** *
-   * Contains cluster daemon metrics such as HDFS and YARN stats.
+   * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
    * **Beta Feature**: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -253,7 +253,7 @@ com.google.cloud.dataproc.v1beta2.ClusterStatusOrBuilder getStatusHistoryOrBuild boolean hasMetrics(); /** *
-   * Contains cluster daemon metrics such as HDFS and YARN stats.
+   * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
    * **Beta Feature**: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
@@ -263,7 +263,7 @@ com.google.cloud.dataproc.v1beta2.ClusterStatusOrBuilder getStatusHistoryOrBuild com.google.cloud.dataproc.v1beta2.ClusterMetrics getMetrics(); /** *
-   * Contains cluster daemon metrics such as HDFS and YARN stats.
+   * Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
    * **Beta Feature**: This report is available for testing purposes only. It may
    * be changed before final release.
    * 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java index 31d41dd5a1e8..ae77f89f3faa 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java @@ -29,6 +29,11 @@ public static void registerAllExtensions( static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_dataproc_v1beta2_ClusterConfig_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_dataproc_v1beta2_GceClusterConfig_descriptor; static final @@ -166,7 +171,7 @@ public static void registerAllExtensions( "tus\022\024\n\014cluster_uuid\030\006 \001(\t\022>\n\007metrics\030\t \001" + "(\0132-.google.cloud.dataproc.v1beta2.Clust" + "erMetrics\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n" + - "\005value\030\002 \001(\t:\0028\001\"\311\004\n\rClusterConfig\022\025\n\rco" + + "\005value\030\002 \001(\t:\0028\001\"\225\005\n\rClusterConfig\022\025\n\rco" + "nfig_bucket\030\001 \001(\t\022K\n\022gce_cluster_config\030" + "\010 \001(\0132/.google.cloud.dataproc.v1beta2.Gc" + "eClusterConfig\022I\n\rmaster_config\030\t \001(\01322." 
+ @@ -181,115 +186,118 @@ public static void registerAllExtensions( "ud.dataproc.v1beta2.LifecycleConfig\022W\n\026i" + "nitialization_actions\030\013 \003(\01327.google.clo" + "ud.dataproc.v1beta2.NodeInitializationAc" + - "tion\"\264\002\n\020GceClusterConfig\022\020\n\010zone_uri\030\001 " + - "\001(\t\022\023\n\013network_uri\030\002 \001(\t\022\026\n\016subnetwork_u" + - "ri\030\006 \001(\t\022\030\n\020internal_ip_only\030\007 \001(\010\022\027\n\017se" + - "rvice_account\030\010 \001(\t\022\036\n\026service_account_s" + - "copes\030\003 \003(\t\022\014\n\004tags\030\004 \003(\t\022O\n\010metadata\030\005 " + - "\003(\0132=.google.cloud.dataproc.v1beta2.GceC" + - "lusterConfig.MetadataEntry\032/\n\rMetadataEn" + - "try\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\374\002\n\023" + - "InstanceGroupConfig\022\025\n\rnum_instances\030\001 \001" + - "(\005\022\026\n\016instance_names\030\002 \003(\t\022\021\n\timage_uri\030" + - "\003 \001(\t\022\030\n\020machine_type_uri\030\004 \001(\t\022>\n\013disk_" + - "config\030\005 \001(\0132).google.cloud.dataproc.v1b" + - "eta2.DiskConfig\022\026\n\016is_preemptible\030\006 \001(\010\022" + - "O\n\024managed_group_config\030\007 \001(\01321.google.c" + - "loud.dataproc.v1beta2.ManagedGroupConfig" + - "\022F\n\014accelerators\030\010 \003(\01320.google.cloud.da" + - "taproc.v1beta2.AcceleratorConfig\022\030\n\020min_" + - "cpu_platform\030\t \001(\t\"Y\n\022ManagedGroupConfig" + - "\022\036\n\026instance_template_name\030\001 \001(\t\022#\n\033inst" + - "ance_group_manager_name\030\002 \001(\t\"L\n\021Acceler" + - "atorConfig\022\034\n\024accelerator_type_uri\030\001 \001(\t" + - "\022\031\n\021accelerator_count\030\002 \001(\005\"W\n\nDiskConfi" + - "g\022\026\n\016boot_disk_type\030\003 \001(\t\022\031\n\021boot_disk_s" + - "ize_gb\030\001 \001(\005\022\026\n\016num_local_ssds\030\002 \001(\005\"\272\001\n" + - "\017LifecycleConfig\0222\n\017idle_delete_ttl\030\001 \001(" + - 
"\0132\031.google.protobuf.Duration\0226\n\020auto_del" + - "ete_time\030\002 \001(\0132\032.google.protobuf.Timesta" + - "mpH\000\0224\n\017auto_delete_ttl\030\003 \001(\0132\031.google.p" + - "rotobuf.DurationH\000B\005\n\003ttl\"i\n\030NodeInitial" + - "izationAction\022\027\n\017executable_file\030\001 \001(\t\0224" + - "\n\021execution_timeout\030\002 \001(\0132\031.google.proto" + - "buf.Duration\"\367\002\n\rClusterStatus\022A\n\005state\030" + - "\001 \001(\01622.google.cloud.dataproc.v1beta2.Cl" + - "usterStatus.State\022\016\n\006detail\030\002 \001(\t\0224\n\020sta" + - "te_start_time\030\003 \001(\0132\032.google.protobuf.Ti" + - "mestamp\022G\n\010substate\030\004 \001(\01625.google.cloud" + - ".dataproc.v1beta2.ClusterStatus.Substate" + - "\"V\n\005State\022\013\n\007UNKNOWN\020\000\022\014\n\010CREATING\020\001\022\013\n\007" + - "RUNNING\020\002\022\t\n\005ERROR\020\003\022\014\n\010DELETING\020\004\022\014\n\010UP" + - "DATING\020\005\"<\n\010Substate\022\017\n\013UNSPECIFIED\020\000\022\r\n" + - "\tUNHEALTHY\020\001\022\020\n\014STALE_STATUS\020\002\"\255\001\n\016Softw" + - "areConfig\022\025\n\rimage_version\030\001 \001(\t\022Q\n\nprop" + - "erties\030\002 \003(\0132=.google.cloud.dataproc.v1b" + - "eta2.SoftwareConfig.PropertiesEntry\0321\n\017P" + - "ropertiesEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001" + - "(\t:\0028\001\"\244\002\n\016ClusterMetrics\022T\n\014hdfs_metric" + - "s\030\001 \003(\0132>.google.cloud.dataproc.v1beta2." 
+ - "ClusterMetrics.HdfsMetricsEntry\022T\n\014yarn_" + - "metrics\030\002 \003(\0132>.google.cloud.dataproc.v1" + - "beta2.ClusterMetrics.YarnMetricsEntry\0322\n" + - "\020HdfsMetricsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030" + - "\002 \001(\003:\0028\001\0322\n\020YarnMetricsEntry\022\013\n\003key\030\001 \001" + - "(\t\022\r\n\005value\030\002 \001(\003:\0028\001\"\207\001\n\024CreateClusterR" + - "equest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030\003 \001" + - "(\t\0227\n\007cluster\030\002 \001(\0132&.google.cloud.datap" + - "roc.v1beta2.Cluster\022\022\n\nrequest_id\030\004 \001(\t\"" + - "\220\002\n\024UpdateClusterRequest\022\022\n\nproject_id\030\001" + - " \001(\t\022\016\n\006region\030\005 \001(\t\022\024\n\014cluster_name\030\002 \001" + - "(\t\0227\n\007cluster\030\003 \001(\0132&.google.cloud.datap" + - "roc.v1beta2.Cluster\022@\n\035graceful_decommis" + - "sion_timeout\030\006 \001(\0132\031.google.protobuf.Dur" + - "ation\022/\n\013update_mask\030\004 \001(\0132\032.google.prot" + - "obuf.FieldMask\022\022\n\nrequest_id\030\007 \001(\t\"z\n\024De" + - "leteClusterRequest\022\022\n\nproject_id\030\001 \001(\t\022\016" + - "\n\006region\030\003 \001(\t\022\024\n\014cluster_name\030\002 \001(\t\022\024\n\014" + - "cluster_uuid\030\004 \001(\t\022\022\n\nrequest_id\030\005 \001(\t\"M" + - "\n\021GetClusterRequest\022\022\n\nproject_id\030\001 \001(\t\022" + - "\016\n\006region\030\003 \001(\t\022\024\n\014cluster_name\030\002 \001(\t\"p\n" + - "\023ListClustersRequest\022\022\n\nproject_id\030\001 \001(\t" + - "\022\016\n\006region\030\004 \001(\t\022\016\n\006filter\030\005 \001(\t\022\021\n\tpage" + - "_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"i\n\024ListC" + - "lustersResponse\0228\n\010clusters\030\001 \003(\0132&.goog" + - "le.cloud.dataproc.v1beta2.Cluster\022\027\n\017nex" + - "t_page_token\030\002 \001(\t\"R\n\026DiagnoseClusterReq" + - "uest\022\022\n\nproject_id\030\001 
\001(\t\022\016\n\006region\030\003 \001(\t" + - "\022\024\n\014cluster_name\030\002 \001(\t\",\n\026DiagnoseCluste" + - "rResults\022\022\n\noutput_uri\030\001 \001(\t2\370\010\n\021Cluster" + - "Controller\022\256\001\n\rCreateCluster\0223.google.cl" + - "oud.dataproc.v1beta2.CreateClusterReques" + - "t\032\035.google.longrunning.Operation\"I\202\323\344\223\002C" + - "\"8/v1beta2/projects/{project_id}/regions" + - "/{region}/clusters:\007cluster\022\275\001\n\rUpdateCl" + - "uster\0223.google.cloud.dataproc.v1beta2.Up" + - "dateClusterRequest\032\035.google.longrunning." + - "Operation\"X\202\323\344\223\002R2G/v1beta2/projects/{pr" + - "oject_id}/regions/{region}/clusters/{clu" + - "ster_name}:\007cluster\022\264\001\n\rDeleteCluster\0223." + - "google.cloud.dataproc.v1beta2.DeleteClus" + - "terRequest\032\035.google.longrunning.Operatio" + - "n\"O\202\323\344\223\002I*G/v1beta2/projects/{project_id" + - "}/regions/{region}/clusters/{cluster_nam" + - "e}\022\267\001\n\nGetCluster\0220.google.cloud.datapro" + - "c.v1beta2.GetClusterRequest\032&.google.clo" + - "ud.dataproc.v1beta2.Cluster\"O\202\323\344\223\002I\022G/v1" + - "beta2/projects/{project_id}/regions/{reg" + - "ion}/clusters/{cluster_name}\022\271\001\n\014ListClu" + - "sters\0222.google.cloud.dataproc.v1beta2.Li" + - "stClustersRequest\0323.google.cloud.datapro" + - "c.v1beta2.ListClustersResponse\"@\202\323\344\223\002:\0228" + - "/v1beta2/projects/{project_id}/regions/{" + - "region}/clusters\022\304\001\n\017DiagnoseCluster\0225.g" + - "oogle.cloud.dataproc.v1beta2.DiagnoseClu" + - "sterRequest\032\035.google.longrunning.Operati" + - "on\"[\202\323\344\223\002U\"P/v1beta2/projects/{project_i" + - "d}/regions/{region}/clusters/{cluster_na" + - "me}:diagnose:\001*B{\n!com.google.cloud.data" + - "proc.v1beta2B\rClustersProtoP\001ZEgoogle.go" + - "lang.org/genproto/googleapis/cloud/datap" + - "roc/v1beta2;dataprocb\006proto3" + "tion\022J\n\021encryption_config\030\017 \001(\0132/.google" + + 
".cloud.dataproc.v1beta2.EncryptionConfig" + + "\"/\n\020EncryptionConfig\022\033\n\023gce_pd_kms_key_n" + + "ame\030\001 \001(\t\"\264\002\n\020GceClusterConfig\022\020\n\010zone_u" + + "ri\030\001 \001(\t\022\023\n\013network_uri\030\002 \001(\t\022\026\n\016subnetw" + + "ork_uri\030\006 \001(\t\022\030\n\020internal_ip_only\030\007 \001(\010\022" + + "\027\n\017service_account\030\010 \001(\t\022\036\n\026service_acco" + + "unt_scopes\030\003 \003(\t\022\014\n\004tags\030\004 \003(\t\022O\n\010metada" + + "ta\030\005 \003(\0132=.google.cloud.dataproc.v1beta2" + + ".GceClusterConfig.MetadataEntry\032/\n\rMetad" + + "ataEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001" + + "\"\374\002\n\023InstanceGroupConfig\022\025\n\rnum_instance" + + "s\030\001 \001(\005\022\026\n\016instance_names\030\002 \003(\t\022\021\n\timage" + + "_uri\030\003 \001(\t\022\030\n\020machine_type_uri\030\004 \001(\t\022>\n\013" + + "disk_config\030\005 \001(\0132).google.cloud.datapro" + + "c.v1beta2.DiskConfig\022\026\n\016is_preemptible\030\006" + + " \001(\010\022O\n\024managed_group_config\030\007 \001(\01321.goo" + + "gle.cloud.dataproc.v1beta2.ManagedGroupC" + + "onfig\022F\n\014accelerators\030\010 \003(\01320.google.clo" + + "ud.dataproc.v1beta2.AcceleratorConfig\022\030\n" + + "\020min_cpu_platform\030\t \001(\t\"Y\n\022ManagedGroupC" + + "onfig\022\036\n\026instance_template_name\030\001 \001(\t\022#\n" + + "\033instance_group_manager_name\030\002 \001(\t\"L\n\021Ac" + + "celeratorConfig\022\034\n\024accelerator_type_uri\030" + + "\001 \001(\t\022\031\n\021accelerator_count\030\002 \001(\005\"W\n\nDisk" + + "Config\022\026\n\016boot_disk_type\030\003 \001(\t\022\031\n\021boot_d" + + "isk_size_gb\030\001 \001(\005\022\026\n\016num_local_ssds\030\002 \001(" + + "\005\"\272\001\n\017LifecycleConfig\0222\n\017idle_delete_ttl" + + "\030\001 \001(\0132\031.google.protobuf.Duration\0226\n\020aut" + + "o_delete_time\030\002 
\001(\0132\032.google.protobuf.Ti" + + "mestampH\000\0224\n\017auto_delete_ttl\030\003 \001(\0132\031.goo" + + "gle.protobuf.DurationH\000B\005\n\003ttl\"i\n\030NodeIn" + + "itializationAction\022\027\n\017executable_file\030\001 " + + "\001(\t\0224\n\021execution_timeout\030\002 \001(\0132\031.google." + + "protobuf.Duration\"\367\002\n\rClusterStatus\022A\n\005s" + + "tate\030\001 \001(\01622.google.cloud.dataproc.v1bet" + + "a2.ClusterStatus.State\022\016\n\006detail\030\002 \001(\t\0224" + + "\n\020state_start_time\030\003 \001(\0132\032.google.protob" + + "uf.Timestamp\022G\n\010substate\030\004 \001(\01625.google." + + "cloud.dataproc.v1beta2.ClusterStatus.Sub" + + "state\"V\n\005State\022\013\n\007UNKNOWN\020\000\022\014\n\010CREATING\020" + + "\001\022\013\n\007RUNNING\020\002\022\t\n\005ERROR\020\003\022\014\n\010DELETING\020\004\022" + + "\014\n\010UPDATING\020\005\"<\n\010Substate\022\017\n\013UNSPECIFIED" + + "\020\000\022\r\n\tUNHEALTHY\020\001\022\020\n\014STALE_STATUS\020\002\"\255\001\n\016" + + "SoftwareConfig\022\025\n\rimage_version\030\001 \001(\t\022Q\n" + + "\nproperties\030\002 \003(\0132=.google.cloud.datapro" + + "c.v1beta2.SoftwareConfig.PropertiesEntry" + + "\0321\n\017PropertiesEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005valu" + + "e\030\002 \001(\t:\0028\001\"\244\002\n\016ClusterMetrics\022T\n\014hdfs_m" + + "etrics\030\001 \003(\0132>.google.cloud.dataproc.v1b" + + "eta2.ClusterMetrics.HdfsMetricsEntry\022T\n\014" + + "yarn_metrics\030\002 \003(\0132>.google.cloud.datapr" + + "oc.v1beta2.ClusterMetrics.YarnMetricsEnt" + + "ry\0322\n\020HdfsMetricsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005v" + + "alue\030\002 \001(\003:\0028\001\0322\n\020YarnMetricsEntry\022\013\n\003ke" + + "y\030\001 \001(\t\022\r\n\005value\030\002 \001(\003:\0028\001\"\207\001\n\024CreateClu" + + "sterRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006regio" + + "n\030\003 \001(\t\0227\n\007cluster\030\002 \001(\0132&.google.cloud." 
+ + "dataproc.v1beta2.Cluster\022\022\n\nrequest_id\030\004" + + " \001(\t\"\220\002\n\024UpdateClusterRequest\022\022\n\nproject" + + "_id\030\001 \001(\t\022\016\n\006region\030\005 \001(\t\022\024\n\014cluster_nam" + + "e\030\002 \001(\t\0227\n\007cluster\030\003 \001(\0132&.google.cloud." + + "dataproc.v1beta2.Cluster\022@\n\035graceful_dec" + + "ommission_timeout\030\006 \001(\0132\031.google.protobu" + + "f.Duration\022/\n\013update_mask\030\004 \001(\0132\032.google" + + ".protobuf.FieldMask\022\022\n\nrequest_id\030\007 \001(\t\"" + + "z\n\024DeleteClusterRequest\022\022\n\nproject_id\030\001 " + + "\001(\t\022\016\n\006region\030\003 \001(\t\022\024\n\014cluster_name\030\002 \001(" + + "\t\022\024\n\014cluster_uuid\030\004 \001(\t\022\022\n\nrequest_id\030\005 " + + "\001(\t\"M\n\021GetClusterRequest\022\022\n\nproject_id\030\001" + + " \001(\t\022\016\n\006region\030\003 \001(\t\022\024\n\014cluster_name\030\002 \001" + + "(\t\"p\n\023ListClustersRequest\022\022\n\nproject_id\030" + + "\001 \001(\t\022\016\n\006region\030\004 \001(\t\022\016\n\006filter\030\005 \001(\t\022\021\n" + + "\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"i\n\024" + + "ListClustersResponse\0228\n\010clusters\030\001 \003(\0132&" + + ".google.cloud.dataproc.v1beta2.Cluster\022\027" + + "\n\017next_page_token\030\002 \001(\t\"R\n\026DiagnoseClust" + + "erRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006region\030" + + "\003 \001(\t\022\024\n\014cluster_name\030\002 \001(\t\",\n\026DiagnoseC" + + "lusterResults\022\022\n\noutput_uri\030\001 \001(\t2\370\010\n\021Cl" + + "usterController\022\256\001\n\rCreateCluster\0223.goog" + + "le.cloud.dataproc.v1beta2.CreateClusterR" + + "equest\032\035.google.longrunning.Operation\"I\202" + + "\323\344\223\002C\"8/v1beta2/projects/{project_id}/re" + + "gions/{region}/clusters:\007cluster\022\275\001\n\rUpd" + + "ateCluster\0223.google.cloud.dataproc.v1bet" + + "a2.UpdateClusterRequest\032\035.google.longrun" + + 
"ning.Operation\"X\202\323\344\223\002R2G/v1beta2/project" + + "s/{project_id}/regions/{region}/clusters" + + "/{cluster_name}:\007cluster\022\264\001\n\rDeleteClust" + + "er\0223.google.cloud.dataproc.v1beta2.Delet" + + "eClusterRequest\032\035.google.longrunning.Ope" + + "ration\"O\202\323\344\223\002I*G/v1beta2/projects/{proje" + + "ct_id}/regions/{region}/clusters/{cluste" + + "r_name}\022\267\001\n\nGetCluster\0220.google.cloud.da" + + "taproc.v1beta2.GetClusterRequest\032&.googl" + + "e.cloud.dataproc.v1beta2.Cluster\"O\202\323\344\223\002I" + + "\022G/v1beta2/projects/{project_id}/regions" + + "/{region}/clusters/{cluster_name}\022\271\001\n\014Li" + + "stClusters\0222.google.cloud.dataproc.v1bet" + + "a2.ListClustersRequest\0323.google.cloud.da" + + "taproc.v1beta2.ListClustersResponse\"@\202\323\344" + + "\223\002:\0228/v1beta2/projects/{project_id}/regi" + + "ons/{region}/clusters\022\304\001\n\017DiagnoseCluste" + + "r\0225.google.cloud.dataproc.v1beta2.Diagno" + + "seClusterRequest\032\035.google.longrunning.Op" + + "eration\"[\202\323\344\223\002U\"P/v1beta2/projects/{proj" + + "ect_id}/regions/{region}/clusters/{clust" + + "er_name}:diagnose:\001*B{\n!com.google.cloud" + + ".dataproc.v1beta2B\rClustersProtoP\001ZEgoog" + + "le.golang.org/genproto/googleapis/cloud/" + + "dataproc/v1beta2;dataprocb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -326,9 +334,15 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1beta2_ClusterConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ClusterConfig_descriptor, - new java.lang.String[] { "ConfigBucket", "GceClusterConfig", "MasterConfig", "WorkerConfig", "SecondaryWorkerConfig", "SoftwareConfig", "LifecycleConfig", "InitializationActions", }); - internal_static_google_cloud_dataproc_v1beta2_GceClusterConfig_descriptor = + new java.lang.String[] { "ConfigBucket", "GceClusterConfig", "MasterConfig", "WorkerConfig", "SecondaryWorkerConfig", "SoftwareConfig", "LifecycleConfig", "InitializationActions", "EncryptionConfig", }); + internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_descriptor = getDescriptor().getMessageTypes().get(2); + internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_descriptor, + new java.lang.String[] { "GcePdKmsKeyName", }); + internal_static_google_cloud_dataproc_v1beta2_GceClusterConfig_descriptor = + getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_dataproc_v1beta2_GceClusterConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_GceClusterConfig_descriptor, @@ -340,49 +354,49 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1beta2_GceClusterConfig_MetadataEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1beta2_InstanceGroupConfig_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(4); 
internal_static_google_cloud_dataproc_v1beta2_InstanceGroupConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_InstanceGroupConfig_descriptor, new java.lang.String[] { "NumInstances", "InstanceNames", "ImageUri", "MachineTypeUri", "DiskConfig", "IsPreemptible", "ManagedGroupConfig", "Accelerators", "MinCpuPlatform", }); internal_static_google_cloud_dataproc_v1beta2_ManagedGroupConfig_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_dataproc_v1beta2_ManagedGroupConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ManagedGroupConfig_descriptor, new java.lang.String[] { "InstanceTemplateName", "InstanceGroupManagerName", }); internal_static_google_cloud_dataproc_v1beta2_AcceleratorConfig_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_dataproc_v1beta2_AcceleratorConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_AcceleratorConfig_descriptor, new java.lang.String[] { "AcceleratorTypeUri", "AcceleratorCount", }); internal_static_google_cloud_dataproc_v1beta2_DiskConfig_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_dataproc_v1beta2_DiskConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_DiskConfig_descriptor, new java.lang.String[] { "BootDiskType", "BootDiskSizeGb", "NumLocalSsds", }); internal_static_google_cloud_dataproc_v1beta2_LifecycleConfig_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(8); 
internal_static_google_cloud_dataproc_v1beta2_LifecycleConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_LifecycleConfig_descriptor, new java.lang.String[] { "IdleDeleteTtl", "AutoDeleteTime", "AutoDeleteTtl", "Ttl", }); internal_static_google_cloud_dataproc_v1beta2_NodeInitializationAction_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_dataproc_v1beta2_NodeInitializationAction_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_NodeInitializationAction_descriptor, new java.lang.String[] { "ExecutableFile", "ExecutionTimeout", }); internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ClusterStatus_descriptor, new java.lang.String[] { "State", "Detail", "StateStartTime", "Substate", }); internal_static_google_cloud_dataproc_v1beta2_SoftwareConfig_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_dataproc_v1beta2_SoftwareConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_SoftwareConfig_descriptor, @@ -394,7 +408,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1beta2_SoftwareConfig_PropertiesEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1beta2_ClusterMetrics_descriptor = - getDescriptor().getMessageTypes().get(11); + 
getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_dataproc_v1beta2_ClusterMetrics_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ClusterMetrics_descriptor, @@ -412,49 +426,49 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1beta2_ClusterMetrics_YarnMetricsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1beta2_CreateClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_dataproc_v1beta2_CreateClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_CreateClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "Cluster", "RequestId", }); internal_static_google_cloud_dataproc_v1beta2_UpdateClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_dataproc_v1beta2_UpdateClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_UpdateClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "ClusterName", "Cluster", "GracefulDecommissionTimeout", "UpdateMask", "RequestId", }); internal_static_google_cloud_dataproc_v1beta2_DeleteClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_dataproc_v1beta2_DeleteClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_DeleteClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "ClusterName", "ClusterUuid", "RequestId", }); 
internal_static_google_cloud_dataproc_v1beta2_GetClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_dataproc_v1beta2_GetClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_GetClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "ClusterName", }); internal_static_google_cloud_dataproc_v1beta2_ListClustersRequest_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_dataproc_v1beta2_ListClustersRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ListClustersRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "Filter", "PageSize", "PageToken", }); internal_static_google_cloud_dataproc_v1beta2_ListClustersResponse_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(18); internal_static_google_cloud_dataproc_v1beta2_ListClustersResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ListClustersResponse_descriptor, new java.lang.String[] { "Clusters", "NextPageToken", }); internal_static_google_cloud_dataproc_v1beta2_DiagnoseClusterRequest_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(19); internal_static_google_cloud_dataproc_v1beta2_DiagnoseClusterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_DiagnoseClusterRequest_descriptor, new java.lang.String[] { "ProjectId", "Region", "ClusterName", }); internal_static_google_cloud_dataproc_v1beta2_DiagnoseClusterResults_descriptor = - getDescriptor().getMessageTypes().get(19); 
+ getDescriptor().getMessageTypes().get(20); internal_static_google_cloud_dataproc_v1beta2_DiagnoseClusterResults_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_DiagnoseClusterResults_descriptor, diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfig.java new file mode 100644 index 000000000000..62be5032d85c --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfig.java @@ -0,0 +1,584 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/clusters.proto + +package com.google.cloud.dataproc.v1beta2; + +/** + *
+ * Encryption settings for the cluster.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.EncryptionConfig} + */ +public final class EncryptionConfig extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.EncryptionConfig) + EncryptionConfigOrBuilder { +private static final long serialVersionUID = 0L; + // Use EncryptionConfig.newBuilder() to construct. + private EncryptionConfig(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private EncryptionConfig() { + gcePdKmsKeyName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private EncryptionConfig( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + gcePdKmsKeyName_ = s; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
com.google.cloud.dataproc.v1beta2.ClustersProto.internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.ClustersProto.internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.EncryptionConfig.class, com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder.class); + } + + public static final int GCE_PD_KMS_KEY_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object gcePdKmsKeyName_; + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public java.lang.String getGcePdKmsKeyName() { + java.lang.Object ref = gcePdKmsKeyName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + gcePdKmsKeyName_ = s; + return s; + } + } + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public com.google.protobuf.ByteString + getGcePdKmsKeyNameBytes() { + java.lang.Object ref = gcePdKmsKeyName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + gcePdKmsKeyName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getGcePdKmsKeyNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, gcePdKmsKeyName_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getGcePdKmsKeyNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, gcePdKmsKeyName_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1beta2.EncryptionConfig)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1beta2.EncryptionConfig other = (com.google.cloud.dataproc.v1beta2.EncryptionConfig) obj; + + boolean result = true; + result = result && getGcePdKmsKeyName() + .equals(other.getGcePdKmsKeyName()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + 
int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + GCE_PD_KMS_KEY_NAME_FIELD_NUMBER; + hash = (53 * hash) + getGcePdKmsKeyName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1beta2.EncryptionConfig prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Encryption settings for the cluster.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.EncryptionConfig} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.EncryptionConfig) + com.google.cloud.dataproc.v1beta2.EncryptionConfigOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.ClustersProto.internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.ClustersProto.internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.EncryptionConfig.class, com.google.cloud.dataproc.v1beta2.EncryptionConfig.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1beta2.EncryptionConfig.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + gcePdKmsKeyName_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1beta2.ClustersProto.internal_static_google_cloud_dataproc_v1beta2_EncryptionConfig_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.EncryptionConfig getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1beta2.EncryptionConfig.getDefaultInstance(); + } + + 
@java.lang.Override + public com.google.cloud.dataproc.v1beta2.EncryptionConfig build() { + com.google.cloud.dataproc.v1beta2.EncryptionConfig result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.EncryptionConfig buildPartial() { + com.google.cloud.dataproc.v1beta2.EncryptionConfig result = new com.google.cloud.dataproc.v1beta2.EncryptionConfig(this); + result.gcePdKmsKeyName_ = gcePdKmsKeyName_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1beta2.EncryptionConfig) { + return mergeFrom((com.google.cloud.dataproc.v1beta2.EncryptionConfig)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.EncryptionConfig other) { + if (other 
== com.google.cloud.dataproc.v1beta2.EncryptionConfig.getDefaultInstance()) return this; + if (!other.getGcePdKmsKeyName().isEmpty()) { + gcePdKmsKeyName_ = other.gcePdKmsKeyName_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1beta2.EncryptionConfig parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1beta2.EncryptionConfig) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object gcePdKmsKeyName_ = ""; + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public java.lang.String getGcePdKmsKeyName() { + java.lang.Object ref = gcePdKmsKeyName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + gcePdKmsKeyName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public com.google.protobuf.ByteString + getGcePdKmsKeyNameBytes() { + java.lang.Object ref = gcePdKmsKeyName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + gcePdKmsKeyName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public Builder setGcePdKmsKeyName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + gcePdKmsKeyName_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public Builder clearGcePdKmsKeyName() { + + gcePdKmsKeyName_ = getDefaultInstance().getGcePdKmsKeyName(); + onChanged(); + return this; + } + /** + *
+     * Optional. The Cloud KMS key name to use for PD disk encryption for all
+     * instances in the cluster.
+     * 
+ * + * string gce_pd_kms_key_name = 1; + */ + public Builder setGcePdKmsKeyNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + gcePdKmsKeyName_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.EncryptionConfig) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.EncryptionConfig) + private static final com.google.cloud.dataproc.v1beta2.EncryptionConfig DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.EncryptionConfig(); + } + + public static com.google.cloud.dataproc.v1beta2.EncryptionConfig getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public EncryptionConfig parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new EncryptionConfig(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.EncryptionConfig getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfigOrBuilder.java new file mode 100644 index 000000000000..0fe71f8f1f90 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/EncryptionConfigOrBuilder.java @@ -0,0 +1,29 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/clusters.proto + +package com.google.cloud.dataproc.v1beta2; + +public interface EncryptionConfigOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1beta2.EncryptionConfig) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + java.lang.String getGcePdKmsKeyName(); + /** + *
+   * Optional. The Cloud KMS key name to use for PD disk encryption for all
+   * instances in the cluster.
+   * 
+ * + * string gce_pd_kms_key_name = 1; + */ + com.google.protobuf.ByteString + getGcePdKmsKeyNameBytes(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java index b1fe652db26d..4974b8772960 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java @@ -232,8 +232,9 @@ public java.lang.String getInstanceNames(int index) { private volatile java.lang.Object imageUri_; /** *
-   * Output only. The Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -252,8 +253,9 @@ public java.lang.String getImageUri() { } /** *
-   * Output only. The Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -1244,8 +1246,9 @@ public Builder addInstanceNamesBytes( private java.lang.Object imageUri_ = ""; /** *
-     * Output only. The Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1264,8 +1267,9 @@ public java.lang.String getImageUri() { } /** *
-     * Output only. The Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1285,8 +1289,9 @@ public java.lang.String getImageUri() { } /** *
-     * Output only. The Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1303,8 +1308,9 @@ public Builder setImageUri( } /** *
-     * Output only. The Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; @@ -1317,8 +1323,9 @@ public Builder clearImageUri() { } /** *
-     * Output only. The Compute Engine image resource used for cluster
-     * instances. Inferred from `SoftwareConfig.image_version`.
+     * Optional. The Compute Engine image resource used for cluster
+     * instances. It can be specified or may be inferred from
+     * `SoftwareConfig.image_version`.
      * 
* * string image_uri = 3; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java index f6af4d9f3340..578e78c1f20b 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java @@ -58,8 +58,9 @@ public interface InstanceGroupConfigOrBuilder extends /** *
-   * Output only. The Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; @@ -67,8 +68,9 @@ public interface InstanceGroupConfigOrBuilder extends java.lang.String getImageUri(); /** *
-   * Output only. The Compute Engine image resource used for cluster
-   * instances. Inferred from `SoftwareConfig.image_version`.
+   * Optional. The Compute Engine image resource used for cluster
+   * instances. It can be specified or may be inferred from
+   * `SoftwareConfig.image_version`.
    * 
* * string image_uri = 3; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequest.java index a47526e26c88..ac4deb2dde57 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequest.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequest.java @@ -22,6 +22,7 @@ private InstantiateInlineWorkflowTemplateRequest(com.google.protobuf.GeneratedMe private InstantiateInlineWorkflowTemplateRequest() { parent_ = ""; instanceId_ = ""; + requestId_ = ""; } @java.lang.Override @@ -73,6 +74,12 @@ private InstantiateInlineWorkflowTemplateRequest( instanceId_ = s; break; } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -186,6 +193,48 @@ public com.google.cloud.dataproc.v1beta2.WorkflowTemplateOrBuilder getTemplateOr public static final int INSTANCE_ID_FIELD_NUMBER = 3; private volatile java.lang.Object instanceId_; + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3; + */ + public java.lang.String getInstanceId() { + java.lang.Object ref = instanceId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + instanceId_ = s; + return s; + } + } + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3; + */ + public com.google.protobuf.ByteString + getInstanceIdBytes() { + java.lang.Object ref = instanceId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + instanceId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int REQUEST_ID_FIELD_NUMBER = 4; + private volatile java.lang.Object requestId_; /** *
    * Optional. A tag that prevents multiple concurrent workflow
@@ -197,17 +246,17 @@ public com.google.cloud.dataproc.v1beta2.WorkflowTemplateOrBuilder getTemplateOr
    * underscores (_), and hyphens (-). The maximum length is 40 characters.
    * 
* - * string instance_id = 3; + * string request_id = 4; */ - public java.lang.String getInstanceId() { - java.lang.Object ref = instanceId_; + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); - instanceId_ = s; + requestId_ = s; return s; } } @@ -222,16 +271,16 @@ public java.lang.String getInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 4; */ public com.google.protobuf.ByteString - getInstanceIdBytes() { - java.lang.Object ref = instanceId_; + getRequestIdBytes() { + java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - instanceId_ = b; + requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; @@ -261,6 +310,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getInstanceIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, instanceId_); } + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_); + } unknownFields.writeTo(output); } @@ -280,6 +332,9 @@ public int getSerializedSize() { if (!getInstanceIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, instanceId_); } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -305,6 +360,8 @@ public boolean equals(final java.lang.Object obj) { } result = result && getInstanceId() .equals(other.getInstanceId()); + result = result && getRequestId() + .equals(other.getRequestId()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -324,6 +381,8 @@ public int hashCode() { } hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -471,6 +530,8 @@ public Builder clear() { } instanceId_ = ""; + requestId_ = ""; + return this; } @@ -504,6 +565,7 @@ public 
com.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateReques result.template_ = templateBuilder_.build(); } result.instanceId_ = instanceId_; + result.requestId_ = requestId_; onBuilt(); return result; } @@ -563,6 +625,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.InstantiateInlineWork instanceId_ = other.instanceId_; onChanged(); } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -845,6 +911,95 @@ public com.google.cloud.dataproc.v1beta2.WorkflowTemplateOrBuilder getTemplateOr } private java.lang.Object instanceId_ = ""; + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3; + */ + public java.lang.String getInstanceId() { + java.lang.Object ref = instanceId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + instanceId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3; + */ + public com.google.protobuf.ByteString + getInstanceIdBytes() { + java.lang.Object ref = instanceId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + instanceId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3; + */ + public Builder setInstanceId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + instanceId_ = value; + onChanged(); + return this; + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3; + */ + public Builder clearInstanceId() { + + instanceId_ = getDefaultInstance().getInstanceId(); + onChanged(); + return this; + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3; + */ + public Builder setInstanceIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + instanceId_ = value; + onChanged(); + return this; + } + + private java.lang.Object requestId_ = ""; /** *
      * Optional. A tag that prevents multiple concurrent workflow
@@ -856,15 +1011,15 @@ public com.google.cloud.dataproc.v1beta2.WorkflowTemplateOrBuilder getTemplateOr
      * underscores (_), and hyphens (-). The maximum length is 40 characters.
      * 
* - * string instance_id = 3; + * string request_id = 4; */ - public java.lang.String getInstanceId() { - java.lang.Object ref = instanceId_; + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); - instanceId_ = s; + requestId_ = s; return s; } else { return (java.lang.String) ref; @@ -881,16 +1036,16 @@ public java.lang.String getInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 4; */ public com.google.protobuf.ByteString - getInstanceIdBytes() { - java.lang.Object ref = instanceId_; + getRequestIdBytes() { + java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - instanceId_ = b; + requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; @@ -907,15 +1062,15 @@ public java.lang.String getInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 4; */ - public Builder setInstanceId( + public Builder setRequestId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } - instanceId_ = value; + requestId_ = value; onChanged(); return this; } @@ -930,11 +1085,11 @@ public Builder setInstanceId( * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 4; */ - public Builder clearInstanceId() { + public Builder clearRequestId() { - instanceId_ = getDefaultInstance().getInstanceId(); + requestId_ = getDefaultInstance().getRequestId(); onChanged(); return this; } @@ -949,16 +1104,16 @@ public Builder clearInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 4; */ - public Builder setInstanceIdBytes( + public Builder setRequestIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - instanceId_ = value; + requestId_ = value; onChanged(); return this; } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequestOrBuilder.java index 64c6cd914978..6c4825b920f2 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequestOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateInlineWorkflowTemplateRequestOrBuilder.java @@ -54,6 +54,24 @@ public interface InstantiateInlineWorkflowTemplateRequestOrBuilder extends */ com.google.cloud.dataproc.v1beta2.WorkflowTemplateOrBuilder getTemplateOrBuilder(); + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3; + */ + java.lang.String getInstanceId(); + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3; + */ + com.google.protobuf.ByteString + getInstanceIdBytes(); + /** *
    * Optional. A tag that prevents multiple concurrent workflow
@@ -65,9 +83,9 @@ public interface InstantiateInlineWorkflowTemplateRequestOrBuilder extends
    * underscores (_), and hyphens (-). The maximum length is 40 characters.
    * 
* - * string instance_id = 3; + * string request_id = 4; */ - java.lang.String getInstanceId(); + java.lang.String getRequestId(); /** *
    * Optional. A tag that prevents multiple concurrent workflow
@@ -79,8 +97,8 @@ public interface InstantiateInlineWorkflowTemplateRequestOrBuilder extends
    * underscores (_), and hyphens (-). The maximum length is 40 characters.
    * 
* - * string instance_id = 3; + * string request_id = 4; */ com.google.protobuf.ByteString - getInstanceIdBytes(); + getRequestIdBytes(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequest.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequest.java index 3bb01dcd800b..db88ba6ae24f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequest.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequest.java @@ -23,6 +23,7 @@ private InstantiateWorkflowTemplateRequest() { name_ = ""; version_ = 0; instanceId_ = ""; + requestId_ = ""; } @java.lang.Override @@ -66,6 +67,25 @@ private InstantiateWorkflowTemplateRequest( instanceId_ = s; break; } + case 34: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + parameters_ = com.google.protobuf.MapField.newMapField( + ParametersDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000010; + } + com.google.protobuf.MapEntry + parameters__ = input.readMessage( + ParametersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + parameters_.getMutableMap().put( + parameters__.getKey(), parameters__.getValue()); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + requestId_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -90,6 +110,18 @@ private InstantiateWorkflowTemplateRequest( return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor; } + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField 
internalGetMapField( + int number) { + switch (number) { + case 4: + return internalGetParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { @@ -98,6 +130,7 @@ private InstantiateWorkflowTemplateRequest( com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.class, com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.Builder.class); } + private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; private volatile java.lang.Object name_; /** @@ -163,6 +196,48 @@ public int getVersion() { public static final int INSTANCE_ID_FIELD_NUMBER = 3; private volatile java.lang.Object instanceId_; + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public java.lang.String getInstanceId() { + java.lang.Object ref = instanceId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + instanceId_ = s; + return s; + } + } + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public com.google.protobuf.ByteString + getInstanceIdBytes() { + java.lang.Object ref = instanceId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + instanceId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int REQUEST_ID_FIELD_NUMBER = 5; + private volatile java.lang.Object requestId_; /** *
    * Optional. A tag that prevents multiple concurrent workflow
@@ -174,17 +249,17 @@ public int getVersion() {
    * underscores (_), and hyphens (-). The maximum length is 40 characters.
    * 
* - * string instance_id = 3; + * string request_id = 5; */ - public java.lang.String getInstanceId() { - java.lang.Object ref = instanceId_; + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); - instanceId_ = s; + requestId_ = s; return s; } } @@ -199,22 +274,118 @@ public java.lang.String getInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 5; */ public com.google.protobuf.ByteString - getInstanceIdBytes() { - java.lang.Object ref = instanceId_; + getRequestIdBytes() { + java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - instanceId_ = b; + requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } + public static final int PARAMETERS_FIELD_NUMBER = 4; + private static final class ParametersDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> parameters_; + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + + public boolean containsParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetParameters().getMap().containsKey(key); + } + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + + public java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + + public java.lang.String getParametersOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -238,6 +409,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getInstanceIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, instanceId_); } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetParameters(), + ParametersDefaultEntryHolder.defaultEntry, + 4); + if (!getRequestIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, requestId_); + } unknownFields.writeTo(output); } @@ -257,6 +437,19 @@ public int getSerializedSize() { if (!getInstanceIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, instanceId_); } + for (java.util.Map.Entry entry + : internalGetParameters().getMap().entrySet()) { + com.google.protobuf.MapEntry + parameters__ = ParametersDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, parameters__); + } + if (!getRequestIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, requestId_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -279,6 +472,10 @@ public boolean equals(final java.lang.Object obj) { == other.getVersion()); result = result && getInstanceId() .equals(other.getInstanceId()); + result = result && getRequestId() + .equals(other.getRequestId()); + result = result && internalGetParameters().equals( + 
other.internalGetParameters()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -296,6 +493,12 @@ public int hashCode() { hash = (53 * hash) + getVersion(); hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); + hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; + hash = (53 * hash) + getRequestId().hashCode(); + if (!internalGetParameters().getMap().isEmpty()) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + internalGetParameters().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -407,6 +610,28 @@ public static final class Builder extends return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor; } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 4: + return internalGetParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 4: + return internalGetMutableParameters(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { @@ -439,6 +664,9 @@ public Builder clear() { instanceId_ = ""; + requestId_ = ""; + + internalGetMutableParameters().clear(); return this; } @@ -465,9 +693,15 @@ public com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest buil @java.lang.Override public com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest buildPartial() { com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest result = new 
com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; result.name_ = name_; result.version_ = version_; result.instanceId_ = instanceId_; + result.requestId_ = requestId_; + result.parameters_ = internalGetParameters(); + result.parameters_.makeImmutable(); + result.bitField0_ = to_bitField0_; onBuilt(); return result; } @@ -527,6 +761,12 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTe instanceId_ = other.instanceId_; onChanged(); } + if (!other.getRequestId().isEmpty()) { + requestId_ = other.requestId_; + onChanged(); + } + internalGetMutableParameters().mergeFrom( + other.internalGetParameters()); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -555,6 +795,7 @@ public Builder mergeFrom( } return this; } + private int bitField0_; private java.lang.Object name_ = ""; /** @@ -706,6 +947,95 @@ public Builder clearVersion() { } private java.lang.Object instanceId_ = ""; + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public java.lang.String getInstanceId() { + java.lang.Object ref = instanceId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + instanceId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public com.google.protobuf.ByteString + getInstanceIdBytes() { + java.lang.Object ref = instanceId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + instanceId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public Builder setInstanceId( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + instanceId_ = value; + onChanged(); + return this; + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public Builder clearInstanceId() { + + instanceId_ = getDefaultInstance().getInstanceId(); + onChanged(); + return this; + } + /** + *
+     * Deprecated. Please use `request_id` field instead.
+     * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated public Builder setInstanceIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + instanceId_ = value; + onChanged(); + return this; + } + + private java.lang.Object requestId_ = ""; /** *
      * Optional. A tag that prevents multiple concurrent workflow
@@ -717,15 +1047,15 @@ public Builder clearVersion() {
      * underscores (_), and hyphens (-). The maximum length is 40 characters.
      * 
* - * string instance_id = 3; + * string request_id = 5; */ - public java.lang.String getInstanceId() { - java.lang.Object ref = instanceId_; + public java.lang.String getRequestId() { + java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); - instanceId_ = s; + requestId_ = s; return s; } else { return (java.lang.String) ref; @@ -742,16 +1072,16 @@ public java.lang.String getInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 5; */ public com.google.protobuf.ByteString - getInstanceIdBytes() { - java.lang.Object ref = instanceId_; + getRequestIdBytes() { + java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - instanceId_ = b; + requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; @@ -768,15 +1098,15 @@ public java.lang.String getInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 5; */ - public Builder setInstanceId( + public Builder setRequestId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } - instanceId_ = value; + requestId_ = value; onChanged(); return this; } @@ -791,11 +1121,11 @@ public Builder setInstanceId( * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 5; */ - public Builder clearInstanceId() { + public Builder clearRequestId() { - instanceId_ = getDefaultInstance().getInstanceId(); + requestId_ = getDefaultInstance().getRequestId(); onChanged(); return this; } @@ -810,19 +1140,177 @@ public Builder clearInstanceId() { * underscores (_), and hyphens (-). The maximum length is 40 characters. *
* - * string instance_id = 3; + * string request_id = 5; */ - public Builder setInstanceIdBytes( + public Builder setRequestIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - instanceId_ = value; + requestId_ = value; onChanged(); return this; } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> parameters_; + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + private com.google.protobuf.MapField + internalGetMutableParameters() { + onChanged();; + if (parameters_ == null) { + parameters_ = com.google.protobuf.MapField.newMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + if (!parameters_.isMutable()) { + parameters_ = parameters_.copy(); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + + public boolean containsParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetParameters().getMap().containsKey(key); + } + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + + public java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + + public java.lang.String getParametersOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearParameters() { + internalGetMutableParameters().getMutableMap() + .clear(); + return this; + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + + public Builder removeParameters( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + internalGetMutableParameters().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableParameters() { + return internalGetMutableParameters().getMutableMap(); + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + public Builder putParameters( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + internalGetMutableParameters().getMutableMap() + .put(key, value); + return this; + } + /** + *
+     * Optional. Map from parameter names to values that should be used for those
+     * parameters. Values may not exceed 100 characters.
+     * 
+ * + * map<string, string> parameters = 4; + */ + + public Builder putAllParameters( + java.util.Map values) { + internalGetMutableParameters().getMutableMap() + .putAll(values); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequestOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequestOrBuilder.java index 4ecd13892645..b1afda00eb93 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequestOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstantiateWorkflowTemplateRequestOrBuilder.java @@ -42,6 +42,24 @@ public interface InstantiateWorkflowTemplateRequestOrBuilder extends */ int getVersion(); + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated java.lang.String getInstanceId(); + /** + *
+   * Deprecated. Please use `request_id` field instead.
+   * 
+ * + * string instance_id = 3 [deprecated = true]; + */ + @java.lang.Deprecated com.google.protobuf.ByteString + getInstanceIdBytes(); + /** *
    * Optional. A tag that prevents multiple concurrent workflow
@@ -53,9 +71,9 @@ public interface InstantiateWorkflowTemplateRequestOrBuilder extends
    * underscores (_), and hyphens (-). The maximum length is 40 characters.
    * 
* - * string instance_id = 3; + * string request_id = 5; */ - java.lang.String getInstanceId(); + java.lang.String getRequestId(); /** *
    * Optional. A tag that prevents multiple concurrent workflow
@@ -67,8 +85,67 @@ public interface InstantiateWorkflowTemplateRequestOrBuilder extends
    * underscores (_), and hyphens (-). The maximum length is 40 characters.
    * 
* - * string instance_id = 3; + * string request_id = 5; */ com.google.protobuf.ByteString - getInstanceIdBytes(); + getRequestIdBytes(); + + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + int getParametersCount(); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + boolean containsParameters( + java.lang.String key); + /** + * Use {@link #getParametersMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getParameters(); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + java.util.Map + getParametersMap(); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + + java.lang.String getParametersOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * Optional. Map from parameter names to values that should be used for those
+   * parameters. Values may not exceed 100 characters.
+   * 
+ * + * map<string, string> parameters = 4; + */ + + java.lang.String getParametersOrThrow( + java.lang.String key); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Job.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Job.java index 78d53309a476..c63d794481ec 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Job.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Job.java @@ -22,8 +22,10 @@ private Job(com.google.protobuf.GeneratedMessageV3.Builder builder) { private Job() { statusHistory_ = java.util.Collections.emptyList(); yarnApplications_ = java.util.Collections.emptyList(); + submittedBy_ = ""; driverOutputResourceUri_ = ""; driverControlFilesUri_ = ""; + jobUuid_ = ""; } @java.lang.Override @@ -168,6 +170,12 @@ private Job( input.readMessage(com.google.cloud.dataproc.v1beta2.YarnApplication.parser(), extensionRegistry)); break; } + case 82: { + java.lang.String s = input.readStringRequireUtf8(); + + submittedBy_ = s; + break; + } case 98: { com.google.cloud.dataproc.v1beta2.SparkSqlJob.Builder subBuilder = null; if (typeJobCase_ == 12) { @@ -204,10 +212,10 @@ private Job( break; } case 146: { - if (!((mutable_bitField0_ & 0x00002000) == 0x00002000)) { + if (!((mutable_bitField0_ & 0x00004000) == 0x00004000)) { labels_ = com.google.protobuf.MapField.newMapField( LabelsDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00002000; + mutable_bitField0_ |= 0x00004000; } com.google.protobuf.MapEntry labels__ = input.readMessage( @@ -229,6 +237,12 @@ private Job( break; } + case 178: { + java.lang.String s = input.readStringRequireUtf8(); + + jobUuid_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -791,6 +805,50 @@ public 
com.google.cloud.dataproc.v1beta2.YarnApplicationOrBuilder getYarnApplica return yarnApplications_.get(index); } + public static final int SUBMITTED_BY_FIELD_NUMBER = 10; + private volatile java.lang.Object submittedBy_; + /** + *
+   * Output only. The email address of the user submitting the job. For jobs
+   * submitted on the cluster, the address is <code>username@hostname</code>.
+   * 
+ * + * string submitted_by = 10; + */ + public java.lang.String getSubmittedBy() { + java.lang.Object ref = submittedBy_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + submittedBy_ = s; + return s; + } + } + /** + *
+   * Output only. The email address of the user submitting the job. For jobs
+   * submitted on the cluster, the address is <code>username@hostname</code>.
+   * 
+ * + * string submitted_by = 10; + */ + public com.google.protobuf.ByteString + getSubmittedByBytes() { + java.lang.Object ref = submittedBy_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + submittedBy_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + public static final int DRIVER_OUTPUT_RESOURCE_URI_FIELD_NUMBER = 17; private volatile java.lang.Object driverOutputResourceUri_; /** @@ -1026,6 +1084,52 @@ public com.google.cloud.dataproc.v1beta2.JobSchedulingOrBuilder getSchedulingOrB return getScheduling(); } + public static final int JOB_UUID_FIELD_NUMBER = 22; + private volatile java.lang.Object jobUuid_; + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + public java.lang.String getJobUuid() { + java.lang.Object ref = jobUuid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobUuid_ = s; + return s; + } + } + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + public com.google.protobuf.ByteString + getJobUuidBytes() { + java.lang.Object ref = jobUuid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -1067,6 +1171,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < yarnApplications_.size(); i++) { output.writeMessage(9, yarnApplications_.get(i)); } + if (!getSubmittedByBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 10, submittedBy_); + } if (typeJobCase_ == 12) { output.writeMessage(12, (com.google.cloud.dataproc.v1beta2.SparkSqlJob) typeJob_); } @@ -1088,6 +1195,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (scheduling_ != null) { output.writeMessage(20, getScheduling()); } + if (!getJobUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 22, jobUuid_); + } unknownFields.writeTo(output); } @@ -1133,6 +1243,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(9, yarnApplications_.get(i)); } + if (!getSubmittedByBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, submittedBy_); + } if (typeJobCase_ == 12) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(12, (com.google.cloud.dataproc.v1beta2.SparkSqlJob) typeJob_); @@ -1161,6 +1274,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(20, getScheduling()); } + if (!getJobUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(22, jobUuid_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return 
size; @@ -1196,6 +1312,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getStatusHistoryList()); result = result && getYarnApplicationsList() .equals(other.getYarnApplicationsList()); + result = result && getSubmittedBy() + .equals(other.getSubmittedBy()); result = result && getDriverOutputResourceUri() .equals(other.getDriverOutputResourceUri()); result = result && getDriverControlFilesUri() @@ -1207,6 +1325,8 @@ public boolean equals(final java.lang.Object obj) { result = result && getScheduling() .equals(other.getScheduling()); } + result = result && getJobUuid() + .equals(other.getJobUuid()); result = result && getTypeJobCase().equals( other.getTypeJobCase()); if (!result) return false; @@ -1269,6 +1389,8 @@ public int hashCode() { hash = (37 * hash) + YARN_APPLICATIONS_FIELD_NUMBER; hash = (53 * hash) + getYarnApplicationsList().hashCode(); } + hash = (37 * hash) + SUBMITTED_BY_FIELD_NUMBER; + hash = (53 * hash) + getSubmittedBy().hashCode(); hash = (37 * hash) + DRIVER_OUTPUT_RESOURCE_URI_FIELD_NUMBER; hash = (53 * hash) + getDriverOutputResourceUri().hashCode(); hash = (37 * hash) + DRIVER_CONTROL_FILES_URI_FIELD_NUMBER; @@ -1281,6 +1403,8 @@ public int hashCode() { hash = (37 * hash) + SCHEDULING_FIELD_NUMBER; hash = (53 * hash) + getScheduling().hashCode(); } + hash = (37 * hash) + JOB_UUID_FIELD_NUMBER; + hash = (53 * hash) + getJobUuid().hashCode(); switch (typeJobCase_) { case 3: hash = (37 * hash) + HADOOP_JOB_FIELD_NUMBER; @@ -1500,6 +1624,8 @@ public Builder clear() { } else { yarnApplicationsBuilder_.clear(); } + submittedBy_ = ""; + driverOutputResourceUri_ = ""; driverControlFilesUri_ = ""; @@ -1511,6 +1637,8 @@ public Builder clear() { scheduling_ = null; schedulingBuilder_ = null; } + jobUuid_ = ""; + typeJobCase_ = 0; typeJob_ = null; return this; @@ -1616,6 +1744,7 @@ public com.google.cloud.dataproc.v1beta2.Job buildPartial() { } else { result.yarnApplications_ = yarnApplicationsBuilder_.build(); } + result.submittedBy_ 
= submittedBy_; result.driverOutputResourceUri_ = driverOutputResourceUri_; result.driverControlFilesUri_ = driverControlFilesUri_; result.labels_ = internalGetLabels(); @@ -1625,6 +1754,7 @@ public com.google.cloud.dataproc.v1beta2.Job buildPartial() { } else { result.scheduling_ = schedulingBuilder_.build(); } + result.jobUuid_ = jobUuid_; result.bitField0_ = to_bitField0_; result.typeJobCase_ = typeJobCase_; onBuilt(); @@ -1736,6 +1866,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.Job other) { } } } + if (!other.getSubmittedBy().isEmpty()) { + submittedBy_ = other.submittedBy_; + onChanged(); + } if (!other.getDriverOutputResourceUri().isEmpty()) { driverOutputResourceUri_ = other.driverOutputResourceUri_; onChanged(); @@ -1749,6 +1883,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.Job other) { if (other.hasScheduling()) { mergeScheduling(other.getScheduling()); } + if (!other.getJobUuid().isEmpty()) { + jobUuid_ = other.jobUuid_; + onChanged(); + } switch (other.getTypeJobCase()) { case HADOOP_JOB: { mergeHadoopJob(other.getHadoopJob()); @@ -4028,6 +4166,100 @@ public com.google.cloud.dataproc.v1beta2.YarnApplication.Builder addYarnApplicat return yarnApplicationsBuilder_; } + private java.lang.Object submittedBy_ = ""; + /** + *
+     * Output only. The email address of the user submitting the job. For jobs
+     * submitted on the cluster, the address is <code>username@hostname</code>.
+     * 
+ * + * string submitted_by = 10; + */ + public java.lang.String getSubmittedBy() { + java.lang.Object ref = submittedBy_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + submittedBy_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The email address of the user submitting the job. For jobs
+     * submitted on the cluster, the address is <code>username@hostname</code>.
+     * 
+ * + * string submitted_by = 10; + */ + public com.google.protobuf.ByteString + getSubmittedByBytes() { + java.lang.Object ref = submittedBy_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + submittedBy_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The email address of the user submitting the job. For jobs
+     * submitted on the cluster, the address is <code>username@hostname</code>.
+     * 
+ * + * string submitted_by = 10; + */ + public Builder setSubmittedBy( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + submittedBy_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The email address of the user submitting the job. For jobs
+     * submitted on the cluster, the address is <code>username@hostname</code>.
+     * 
+ * + * string submitted_by = 10; + */ + public Builder clearSubmittedBy() { + + submittedBy_ = getDefaultInstance().getSubmittedBy(); + onChanged(); + return this; + } + /** + *
+     * Output only. The email address of the user submitting the job. For jobs
+     * submitted on the cluster, the address is <code>username@hostname</code>.
+     * 
+ * + * string submitted_by = 10; + */ + public Builder setSubmittedByBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + submittedBy_ = value; + onChanged(); + return this; + } + private java.lang.Object driverOutputResourceUri_ = ""; /** *
@@ -4559,6 +4791,105 @@ public com.google.cloud.dataproc.v1beta2.JobSchedulingOrBuilder getSchedulingOrB
       }
       return schedulingBuilder_;
     }
+
+    private java.lang.Object jobUuid_ = "";
+    /**
+     * 
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public java.lang.String getJobUuid() { + java.lang.Object ref = jobUuid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobUuid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public com.google.protobuf.ByteString + getJobUuidBytes() { + java.lang.Object ref = jobUuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public Builder setJobUuid( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + jobUuid_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public Builder clearJobUuid() { + + jobUuid_ = getDefaultInstance().getJobUuid(); + onChanged(); + return this; + } + /** + *
+     * Output only. A UUID that uniquely identifies a job within the project
+     * over time. This is in contrast to a user-settable reference.job_id that
+     * may be reused over time.
+     * 
+ * + * string job_uuid = 22; + */ + public Builder setJobUuidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + jobUuid_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobOrBuilder.java index bcdd41e1a69d..2fc4468369ca 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobOrBuilder.java @@ -348,6 +348,26 @@ com.google.cloud.dataproc.v1beta2.JobStatusOrBuilder getStatusHistoryOrBuilder( com.google.cloud.dataproc.v1beta2.YarnApplicationOrBuilder getYarnApplicationsOrBuilder( int index); + /** + *
+   * Output only. The email address of the user submitting the job. For jobs
+   * submitted on the cluster, the address is <code>username@hostname</code>.
+   * 
+ * + * string submitted_by = 10; + */ + java.lang.String getSubmittedBy(); + /** + *
+   * Output only. The email address of the user submitting the job. For jobs
+   * submitted on the cluster, the address is <code>username@hostname</code>.
+   * 
+ * + * string submitted_by = 10; + */ + com.google.protobuf.ByteString + getSubmittedByBytes(); + /** *
    * Output only. A URI pointing to the location of the stdout of the job's
@@ -494,5 +514,27 @@ java.lang.String getLabelsOrThrow(
    */
   com.google.cloud.dataproc.v1beta2.JobSchedulingOrBuilder getSchedulingOrBuilder();
 
+  /**
+   * 
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + java.lang.String getJobUuid(); + /** + *
+   * Output only. A UUID that uniquely identifies a job within the project
+   * over time. This is in contrast to a user-settable reference.job_id that
+   * may be reused over time.
+   * 
+ * + * string job_uuid = 22; + */ + com.google.protobuf.ByteString + getJobUuidBytes(); + public com.google.cloud.dataproc.v1beta2.Job.TypeJobCase getTypeJobCase(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java index a165a4fd8069..bc246f0396d5 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java @@ -280,7 +280,7 @@ public static void registerAllExtensions( "\n\005State\022\025\n\021STATE_UNSPECIFIED\020\000\022\007\n\003NEW\020\001\022" + "\016\n\nNEW_SAVING\020\002\022\r\n\tSUBMITTED\020\003\022\014\n\010ACCEPT" + "ED\020\004\022\013\n\007RUNNING\020\005\022\014\n\010FINISHED\020\006\022\n\n\006FAILE" + - "D\020\007\022\n\n\006KILLED\020\010\"\312\007\n\003Job\022>\n\treference\030\001 \001" + + "D\020\007\022\n\n\006KILLED\020\010\"\362\007\n\003Job\022>\n\treference\030\001 \001" + "(\0132+.google.cloud.dataproc.v1beta2.JobRe" + "ference\022>\n\tplacement\030\002 \001(\0132+.google.clou" + "d.dataproc.v1beta2.JobPlacement\022>\n\nhadoo" + @@ -298,12 +298,13 @@ public static void registerAllExtensions( "istory\030\r \003(\0132(.google.cloud.dataproc.v1b" + "eta2.JobStatus\022I\n\021yarn_applications\030\t \003(" + "\0132..google.cloud.dataproc.v1beta2.YarnAp" + - "plication\022\"\n\032driver_output_resource_uri\030" + - "\021 \001(\t\022 \n\030driver_control_files_uri\030\017 \001(\t\022" + - ">\n\006labels\030\022 \003(\0132..google.cloud.dataproc." 
+ - "v1beta2.Job.LabelsEntry\022@\n\nscheduling\030\024 " + - "\001(\0132,.google.cloud.dataproc.v1beta2.JobS" + - "cheduling\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n" + + "plication\022\024\n\014submitted_by\030\n \001(\t\022\"\n\032drive" + + "r_output_resource_uri\030\021 \001(\t\022 \n\030driver_co" + + "ntrol_files_uri\030\017 \001(\t\022>\n\006labels\030\022 \003(\0132.." + + "google.cloud.dataproc.v1beta2.Job.Labels" + + "Entry\022@\n\nscheduling\030\024 \001(\0132,.google.cloud" + + ".dataproc.v1beta2.JobScheduling\022\020\n\010job_u" + + "uid\030\026 \001(\t\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001B\n\n\010type_job\".\n\rJobSched" + "uling\022\035\n\025max_failures_per_hour\030\001 \001(\005\"{\n\020" + "SubmitJobRequest\022\022\n\nproject_id\030\001 \001(\t\022\016\n\006" + @@ -512,7 +513,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1beta2_Job_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_Job_descriptor, - new java.lang.String[] { "Reference", "Placement", "HadoopJob", "SparkJob", "PysparkJob", "HiveJob", "PigJob", "SparkSqlJob", "Status", "StatusHistory", "YarnApplications", "DriverOutputResourceUri", "DriverControlFilesUri", "Labels", "Scheduling", "TypeJob", }); + new java.lang.String[] { "Reference", "Placement", "HadoopJob", "SparkJob", "PysparkJob", "HiveJob", "PigJob", "SparkSqlJob", "Status", "StatusHistory", "YarnApplications", "SubmittedBy", "DriverOutputResourceUri", "DriverControlFilesUri", "Labels", "Scheduling", "JobUuid", "TypeJob", }); internal_static_google_cloud_dataproc_v1beta2_Job_LabelsEntry_descriptor = internal_static_google_cloud_dataproc_v1beta2_Job_descriptor.getNestedTypes().get(0); internal_static_google_cloud_dataproc_v1beta2_Job_LabelsEntry_fieldAccessorTable = new diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java index c66339584d8d..4d6e038f050e 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java @@ -5,7 +5,7 @@ /** *
- * Specifies the cluster auto delete related schedule configuration.
+ * Specifies the cluster auto-delete schedule configuration.
  * 
* * Protobuf type {@code google.cloud.dataproc.v1beta2.LifecycleConfig} @@ -161,8 +161,11 @@ public int getNumber() { private com.google.protobuf.Duration idleDeleteTtl_; /** *
-   * Optional. The longest duration that cluster would keep alive while staying
-   *  idle; passing this threshold will cause cluster to be auto-deleted.
+   * Optional. The duration to keep the cluster alive while idling.
+   * Passing this threshold will cause the cluster to be
+   * deleted. Valid range: **[10m, 14d]**.
+   * Example: **"10m"**, the minimum value, to delete the
+   * cluster when it has had no jobs running for 10 minutes.
    * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -172,8 +175,11 @@ public boolean hasIdleDeleteTtl() { } /** *
-   * Optional. The longest duration that cluster would keep alive while staying
-   *  idle; passing this threshold will cause cluster to be auto-deleted.
+   * Optional. The duration to keep the cluster alive while idling.
+   * Passing this threshold will cause the cluster to be
+   * deleted. Valid range: **[10m, 14d]**.
+   * Example: **"10m"**, the minimum value, to delete the
+   * cluster when it has had no jobs running for 10 minutes.
    * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -183,8 +189,11 @@ public com.google.protobuf.Duration getIdleDeleteTtl() { } /** *
-   * Optional. The longest duration that cluster would keep alive while staying
-   *  idle; passing this threshold will cause cluster to be auto-deleted.
+   * Optional. The duration to keep the cluster alive while idling.
+   * Passing this threshold will cause the cluster to be
+   * deleted. Valid range: **[10m, 14d]**.
+   * Example: **"10m"**, the minimum value, to delete the
+   * cluster when it has had no jobs running for 10 minutes.
    * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -234,8 +243,9 @@ public com.google.protobuf.TimestampOrBuilder getAutoDeleteTimeOrBuilder() { public static final int AUTO_DELETE_TTL_FIELD_NUMBER = 3; /** *
-   * Optional. The life duration of cluster, the cluster will be auto-deleted
-   * at the end of this duration.
+   * Optional. The lifetime duration of cluster. The cluster will be
+   * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+   * Example: **"1d"**, to delete the cluster 1 day after its creation..
    * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -245,8 +255,9 @@ public boolean hasAutoDeleteTtl() { } /** *
-   * Optional. The life duration of cluster, the cluster will be auto-deleted
-   * at the end of this duration.
+   * Optional. The lifetime duration of cluster. The cluster will be
+   * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+   * Example: **"1d"**, to delete the cluster 1 day after its creation..
    * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -259,8 +270,9 @@ public com.google.protobuf.Duration getAutoDeleteTtl() { } /** *
-   * Optional. The life duration of cluster, the cluster will be auto-deleted
-   * at the end of this duration.
+   * Optional. The lifetime duration of cluster. The cluster will be
+   * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+   * Example: **"1d"**, to delete the cluster 1 day after its creation..
    * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -476,7 +488,7 @@ protected Builder newBuilderForType( } /** *
-   * Specifies the cluster auto delete related schedule configuration.
+   * Specifies the cluster auto-delete schedule configuration.
    * 
* * Protobuf type {@code google.cloud.dataproc.v1beta2.LifecycleConfig} @@ -683,8 +695,11 @@ public Builder clearTtl() { com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> idleDeleteTtlBuilder_; /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -694,8 +709,11 @@ public boolean hasIdleDeleteTtl() { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -709,8 +727,11 @@ public com.google.protobuf.Duration getIdleDeleteTtl() { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -730,8 +751,11 @@ public Builder setIdleDeleteTtl(com.google.protobuf.Duration value) { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -749,8 +773,11 @@ public Builder setIdleDeleteTtl( } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -772,8 +799,11 @@ public Builder mergeIdleDeleteTtl(com.google.protobuf.Duration value) { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -791,8 +821,11 @@ public Builder clearIdleDeleteTtl() { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -804,8 +837,11 @@ public com.google.protobuf.Duration.Builder getIdleDeleteTtlBuilder() { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -820,8 +856,11 @@ public com.google.protobuf.DurationOrBuilder getIdleDeleteTtlOrBuilder() { } /** *
-     * Optional. The longest duration that cluster would keep alive while staying
-     *  idle; passing this threshold will cause cluster to be auto-deleted.
+     * Optional. The duration to keep the cluster alive while idling.
+     * Passing this threshold will cause the cluster to be
+     * deleted. Valid range: **[10m, 14d]**.
+     * Example: **"10m"**, the minimum value, to delete the
+     * cluster when it has had no jobs running for 10 minutes.
      * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -1016,8 +1055,9 @@ public com.google.protobuf.TimestampOrBuilder getAutoDeleteTimeOrBuilder() { com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> autoDeleteTtlBuilder_; /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1027,8 +1067,9 @@ public boolean hasAutoDeleteTtl() { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1048,8 +1089,9 @@ public com.google.protobuf.Duration getAutoDeleteTtl() { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1069,8 +1111,9 @@ public Builder setAutoDeleteTtl(com.google.protobuf.Duration value) { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1088,8 +1131,9 @@ public Builder setAutoDeleteTtl( } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1115,8 +1159,9 @@ public Builder mergeAutoDeleteTtl(com.google.protobuf.Duration value) { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1139,8 +1184,9 @@ public Builder clearAutoDeleteTtl() { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1150,8 +1196,9 @@ public com.google.protobuf.Duration.Builder getAutoDeleteTtlBuilder() { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -1168,8 +1215,9 @@ public com.google.protobuf.DurationOrBuilder getAutoDeleteTtlOrBuilder() { } /** *
-     * Optional. The life duration of cluster, the cluster will be auto-deleted
-     * at the end of this duration.
+     * Optional. The lifetime duration of cluster. The cluster will be
+     * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+     * Example: **"1d"**, to delete the cluster 1 day after its creation..
      * 
* * .google.protobuf.Duration auto_delete_ttl = 3; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java index 752d81517534..4a9934ca5e94 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java @@ -9,8 +9,11 @@ public interface LifecycleConfigOrBuilder extends /** *
-   * Optional. The longest duration that cluster would keep alive while staying
-   *  idle; passing this threshold will cause cluster to be auto-deleted.
+   * Optional. The duration to keep the cluster alive while idling.
+   * Passing this threshold will cause the cluster to be
+   * deleted. Valid range: **[10m, 14d]**.
+   * Example: **"10m"**, the minimum value, to delete the
+   * cluster when it has had no jobs running for 10 minutes.
    * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -18,8 +21,11 @@ public interface LifecycleConfigOrBuilder extends boolean hasIdleDeleteTtl(); /** *
-   * Optional. The longest duration that cluster would keep alive while staying
-   *  idle; passing this threshold will cause cluster to be auto-deleted.
+   * Optional. The duration to keep the cluster alive while idling.
+   * Passing this threshold will cause the cluster to be
+   * deleted. Valid range: **[10m, 14d]**.
+   * Example: **"10m"**, the minimum value, to delete the
+   * cluster when it has had no jobs running for 10 minutes.
    * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -27,8 +33,11 @@ public interface LifecycleConfigOrBuilder extends com.google.protobuf.Duration getIdleDeleteTtl(); /** *
-   * Optional. The longest duration that cluster would keep alive while staying
-   *  idle; passing this threshold will cause cluster to be auto-deleted.
+   * Optional. The duration to keep the cluster alive while idling.
+   * Passing this threshold will cause the cluster to be
+   * deleted. Valid range: **[10m, 14d]**.
+   * Example: **"10m"**, the minimum value, to delete the
+   * cluster when it has had no jobs running for 10 minutes.
    * 
* * .google.protobuf.Duration idle_delete_ttl = 1; @@ -62,8 +71,9 @@ public interface LifecycleConfigOrBuilder extends /** *
-   * Optional. The life duration of cluster, the cluster will be auto-deleted
-   * at the end of this duration.
+   * Optional. The lifetime duration of cluster. The cluster will be
+   * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+   * Example: **"1d"**, to delete the cluster 1 day after its creation..
    * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -71,8 +81,9 @@ public interface LifecycleConfigOrBuilder extends boolean hasAutoDeleteTtl(); /** *
-   * Optional. The life duration of cluster, the cluster will be auto-deleted
-   * at the end of this duration.
+   * Optional. The lifetime duration of cluster. The cluster will be
+   * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+   * Example: **"1d"**, to delete the cluster 1 day after its creation..
    * 
* * .google.protobuf.Duration auto_delete_ttl = 3; @@ -80,8 +91,9 @@ public interface LifecycleConfigOrBuilder extends com.google.protobuf.Duration getAutoDeleteTtl(); /** *
-   * Optional. The life duration of cluster, the cluster will be auto-deleted
-   * at the end of this duration.
+   * Optional. The lifetime duration of cluster. The cluster will be
+   * auto-deleted at the end of this period. Valid range: **[10m, 14d]**.
+   * Example: **"1d"**, to delete the cluster 1 day after its creation..
    * 
* * .google.protobuf.Duration auto_delete_ttl = 3; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidation.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidation.java new file mode 100644 index 000000000000..01968c7edac0 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidation.java @@ -0,0 +1,994 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +/** + *
+ * Configuration for parameter validation.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.ParameterValidation} + */ +public final class ParameterValidation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.ParameterValidation) + ParameterValidationOrBuilder { +private static final long serialVersionUID = 0L; + // Use ParameterValidation.newBuilder() to construct. + private ParameterValidation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ParameterValidation() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ParameterValidation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.cloud.dataproc.v1beta2.RegexValidation.Builder subBuilder = null; + if (validationTypeCase_ == 1) { + subBuilder = ((com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_).toBuilder(); + } + validationType_ = + input.readMessage(com.google.cloud.dataproc.v1beta2.RegexValidation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_); + validationType_ = subBuilder.buildPartial(); + } + validationTypeCase_ = 1; + break; + } + case 18: { + com.google.cloud.dataproc.v1beta2.ValueValidation.Builder subBuilder = null; + if (validationTypeCase_ == 2) { + subBuilder = 
((com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_).toBuilder(); + } + validationType_ = + input.readMessage(com.google.cloud.dataproc.v1beta2.ValueValidation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_); + validationType_ = subBuilder.buildPartial(); + } + validationTypeCase_ = 2; + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.ParameterValidation.class, com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder.class); + } + + private int validationTypeCase_ = 0; + private java.lang.Object validationType_; + public enum ValidationTypeCase + implements com.google.protobuf.Internal.EnumLite { + REGEX(1), + VALUES(2), + VALIDATIONTYPE_NOT_SET(0); + private final int value; + private ValidationTypeCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static ValidationTypeCase valueOf(int value) { + return forNumber(value); + } + + public static ValidationTypeCase forNumber(int value) { + switch (value) { + case 1: return REGEX; + case 2: return VALUES; + case 0: return VALIDATIONTYPE_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public ValidationTypeCase + getValidationTypeCase() { + return ValidationTypeCase.forNumber( + validationTypeCase_); + } + + public static final int REGEX_FIELD_NUMBER = 1; + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public boolean hasRegex() { + return validationTypeCase_ == 1; + } + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1beta2.RegexValidation getRegex() { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder getRegexOrBuilder() { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } + + public static final int VALUES_FIELD_NUMBER = 2; + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public boolean hasValues() { + return validationTypeCase_ == 2; + } + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1beta2.ValueValidation getValues() { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder getValuesOrBuilder() { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (validationTypeCase_ == 1) { + output.writeMessage(1, (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_); + } + if (validationTypeCase_ == 2) { + output.writeMessage(2, (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (validationTypeCase_ == 1) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_); + } + if (validationTypeCase_ == 2) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1beta2.ParameterValidation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1beta2.ParameterValidation other = 
(com.google.cloud.dataproc.v1beta2.ParameterValidation) obj; + + boolean result = true; + result = result && getValidationTypeCase().equals( + other.getValidationTypeCase()); + if (!result) return false; + switch (validationTypeCase_) { + case 1: + result = result && getRegex() + .equals(other.getRegex()); + break; + case 2: + result = result && getValues() + .equals(other.getValues()); + break; + case 0: + default: + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + switch (validationTypeCase_) { + case 1: + hash = (37 * hash) + REGEX_FIELD_NUMBER; + hash = (53 * hash) + getRegex().hashCode(); + break; + case 2: + hash = (37 * hash) + VALUES_FIELD_NUMBER; + hash = (53 * hash) + getValues().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
com.google.cloud.dataproc.v1beta2.ParameterValidation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1beta2.ParameterValidation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Configuration for parameter validation.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.ParameterValidation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.ParameterValidation) + com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.ParameterValidation.class, com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1beta2.ParameterValidation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + validationTypeCase_ = 0; + validationType_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ParameterValidation getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ParameterValidation build() { + com.google.cloud.dataproc.v1beta2.ParameterValidation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ParameterValidation buildPartial() { + com.google.cloud.dataproc.v1beta2.ParameterValidation result = new com.google.cloud.dataproc.v1beta2.ParameterValidation(this); + if (validationTypeCase_ == 1) { + if (regexBuilder_ == null) { + result.validationType_ = validationType_; + } else { + result.validationType_ = regexBuilder_.build(); + } + } + if (validationTypeCase_ == 2) { + if (valuesBuilder_ == null) { + result.validationType_ = validationType_; + } else { + result.validationType_ = valuesBuilder_.build(); + } + } + result.validationTypeCase_ = validationTypeCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { 
+ return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1beta2.ParameterValidation) { + return mergeFrom((com.google.cloud.dataproc.v1beta2.ParameterValidation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.ParameterValidation other) { + if (other == com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance()) return this; + switch (other.getValidationTypeCase()) { + case REGEX: { + mergeRegex(other.getRegex()); + break; + } + case VALUES: { + mergeValues(other.getValues()); + break; + } + case VALIDATIONTYPE_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1beta2.ParameterValidation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1beta2.ParameterValidation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int validationTypeCase_ = 0; + private java.lang.Object validationType_; + public ValidationTypeCase + getValidationTypeCase() { + return ValidationTypeCase.forNumber( + validationTypeCase_); + } + + public Builder clearValidationType() { + validationTypeCase_ = 0; + validationType_ = null; + onChanged(); + return this; + } + + + private com.google.protobuf.SingleFieldBuilderV3< + 
com.google.cloud.dataproc.v1beta2.RegexValidation, com.google.cloud.dataproc.v1beta2.RegexValidation.Builder, com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder> regexBuilder_; + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public boolean hasRegex() { + return validationTypeCase_ == 1; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1beta2.RegexValidation getRegex() { + if (regexBuilder_ == null) { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } else { + if (validationTypeCase_ == 1) { + return regexBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public Builder setRegex(com.google.cloud.dataproc.v1beta2.RegexValidation value) { + if (regexBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + validationType_ = value; + onChanged(); + } else { + regexBuilder_.setMessage(value); + } + validationTypeCase_ = 1; + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public Builder setRegex( + com.google.cloud.dataproc.v1beta2.RegexValidation.Builder builderForValue) { + if (regexBuilder_ == null) { + validationType_ = builderForValue.build(); + onChanged(); + } else { + regexBuilder_.setMessage(builderForValue.build()); + } + validationTypeCase_ = 1; + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public Builder mergeRegex(com.google.cloud.dataproc.v1beta2.RegexValidation value) { + if (regexBuilder_ == null) { + if (validationTypeCase_ == 1 && + validationType_ != com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance()) { + validationType_ = com.google.cloud.dataproc.v1beta2.RegexValidation.newBuilder((com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_) + .mergeFrom(value).buildPartial(); + } else { + validationType_ = value; + } + onChanged(); + } else { + if (validationTypeCase_ == 1) { + regexBuilder_.mergeFrom(value); + } + regexBuilder_.setMessage(value); + } + validationTypeCase_ = 1; + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public Builder clearRegex() { + if (regexBuilder_ == null) { + if (validationTypeCase_ == 1) { + validationTypeCase_ = 0; + validationType_ = null; + onChanged(); + } + } else { + if (validationTypeCase_ == 1) { + validationTypeCase_ = 0; + validationType_ = null; + } + regexBuilder_.clear(); + } + return this; + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1beta2.RegexValidation.Builder getRegexBuilder() { + return getRegexFieldBuilder().getBuilder(); + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + public com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder getRegexOrBuilder() { + if ((validationTypeCase_ == 1) && (regexBuilder_ != null)) { + return regexBuilder_.getMessageOrBuilder(); + } else { + if (validationTypeCase_ == 1) { + return (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on regular expressions.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.RegexValidation, com.google.cloud.dataproc.v1beta2.RegexValidation.Builder, com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder> + getRegexFieldBuilder() { + if (regexBuilder_ == null) { + if (!(validationTypeCase_ == 1)) { + validationType_ = com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } + regexBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.RegexValidation, com.google.cloud.dataproc.v1beta2.RegexValidation.Builder, com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder>( + (com.google.cloud.dataproc.v1beta2.RegexValidation) validationType_, + getParentForChildren(), + isClean()); + validationType_ = null; + } + validationTypeCase_ = 1; + onChanged();; + return regexBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.ValueValidation, com.google.cloud.dataproc.v1beta2.ValueValidation.Builder, com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder> valuesBuilder_; + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public boolean hasValues() { + return validationTypeCase_ == 2; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1beta2.ValueValidation getValues() { + if (valuesBuilder_ == null) { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } else { + if (validationTypeCase_ == 2) { + return valuesBuilder_.getMessage(); + } + return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public Builder setValues(com.google.cloud.dataproc.v1beta2.ValueValidation value) { + if (valuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + validationType_ = value; + onChanged(); + } else { + valuesBuilder_.setMessage(value); + } + validationTypeCase_ = 2; + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public Builder setValues( + com.google.cloud.dataproc.v1beta2.ValueValidation.Builder builderForValue) { + if (valuesBuilder_ == null) { + validationType_ = builderForValue.build(); + onChanged(); + } else { + valuesBuilder_.setMessage(builderForValue.build()); + } + validationTypeCase_ = 2; + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public Builder mergeValues(com.google.cloud.dataproc.v1beta2.ValueValidation value) { + if (valuesBuilder_ == null) { + if (validationTypeCase_ == 2 && + validationType_ != com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance()) { + validationType_ = com.google.cloud.dataproc.v1beta2.ValueValidation.newBuilder((com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_) + .mergeFrom(value).buildPartial(); + } else { + validationType_ = value; + } + onChanged(); + } else { + if (validationTypeCase_ == 2) { + valuesBuilder_.mergeFrom(value); + } + valuesBuilder_.setMessage(value); + } + validationTypeCase_ = 2; + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public Builder clearValues() { + if (valuesBuilder_ == null) { + if (validationTypeCase_ == 2) { + validationTypeCase_ = 0; + validationType_ = null; + onChanged(); + } + } else { + if (validationTypeCase_ == 2) { + validationTypeCase_ = 0; + validationType_ = null; + } + valuesBuilder_.clear(); + } + return this; + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1beta2.ValueValidation.Builder getValuesBuilder() { + return getValuesFieldBuilder().getBuilder(); + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + public com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder getValuesOrBuilder() { + if ((validationTypeCase_ == 2) && (valuesBuilder_ != null)) { + return valuesBuilder_.getMessageOrBuilder(); + } else { + if (validationTypeCase_ == 2) { + return (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_; + } + return com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } + } + /** + *
+     * Validation based on a list of allowed values.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.ValueValidation, com.google.cloud.dataproc.v1beta2.ValueValidation.Builder, com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder> + getValuesFieldBuilder() { + if (valuesBuilder_ == null) { + if (!(validationTypeCase_ == 2)) { + validationType_ = com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } + valuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.ValueValidation, com.google.cloud.dataproc.v1beta2.ValueValidation.Builder, com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder>( + (com.google.cloud.dataproc.v1beta2.ValueValidation) validationType_, + getParentForChildren(), + isClean()); + validationType_ = null; + } + validationTypeCase_ = 2; + onChanged();; + return valuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.ParameterValidation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ParameterValidation) + private static final com.google.cloud.dataproc.v1beta2.ParameterValidation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.ParameterValidation(); + } + + public static com.google.cloud.dataproc.v1beta2.ParameterValidation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ParameterValidation parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ParameterValidation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ParameterValidation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidationOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidationOrBuilder.java new file mode 100644 index 000000000000..2fa46dfcd09c --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ParameterValidationOrBuilder.java @@ -0,0 +1,61 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +public interface ParameterValidationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1beta2.ParameterValidation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + boolean hasRegex(); + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + com.google.cloud.dataproc.v1beta2.RegexValidation getRegex(); + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.RegexValidation regex = 1; + */ + com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder getRegexOrBuilder(); + + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + boolean hasValues(); + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + com.google.cloud.dataproc.v1beta2.ValueValidation getValues(); + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ValueValidation values = 2; + */ + com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder getValuesOrBuilder(); + + public com.google.cloud.dataproc.v1beta2.ParameterValidation.ValidationTypeCase getValidationTypeCase(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidation.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidation.java new file mode 100644 index 000000000000..a6b370ea5615 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidation.java @@ -0,0 +1,672 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +/** + *
+ * Validation based on regular expressions.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.RegexValidation} + */ +public final class RegexValidation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.RegexValidation) + RegexValidationOrBuilder { +private static final long serialVersionUID = 0L; + // Use RegexValidation.newBuilder() to construct. + private RegexValidation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RegexValidation() { + regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegexValidation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + regexes_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + regexes_.add(s); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + regexes_ = regexes_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_RegexValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_RegexValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.RegexValidation.class, com.google.cloud.dataproc.v1beta2.RegexValidation.Builder.class); + } + + public static final int REGEXES_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList regexes_; + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ProtocolStringList + getRegexesList() { + return regexes_; + } + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public int getRegexesCount() { + return regexes_.size(); + } + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public java.lang.String getRegexes(int index) { + return regexes_.get(index); + } + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ByteString + getRegexesBytes(int index) { + return regexes_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < regexes_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, regexes_.getRaw(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < regexes_.size(); i++) { + dataSize += computeStringSizeNoTag(regexes_.getRaw(i)); + } + size += dataSize; + size += 1 * getRegexesList().size(); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1beta2.RegexValidation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1beta2.RegexValidation other = (com.google.cloud.dataproc.v1beta2.RegexValidation) obj; + + boolean result = true; + result = result && getRegexesList() + .equals(other.getRegexesList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getRegexesCount() > 0) { + hash = (37 * hash) + REGEXES_FIELD_NUMBER; + hash = (53 * hash) + getRegexesList().hashCode(); + } + hash = 
(29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.RegexValidation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1beta2.RegexValidation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Validation based on regular expressions.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.RegexValidation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.RegexValidation) + com.google.cloud.dataproc.v1beta2.RegexValidationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_RegexValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_RegexValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.RegexValidation.class, com.google.cloud.dataproc.v1beta2.RegexValidation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1beta2.RegexValidation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_RegexValidation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.RegexValidation getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.RegexValidation build() { + com.google.cloud.dataproc.v1beta2.RegexValidation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.RegexValidation buildPartial() { + com.google.cloud.dataproc.v1beta2.RegexValidation result = new com.google.cloud.dataproc.v1beta2.RegexValidation(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regexes_ = regexes_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regexes_ = regexes_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1beta2.RegexValidation) { + 
return mergeFrom((com.google.cloud.dataproc.v1beta2.RegexValidation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.RegexValidation other) { + if (other == com.google.cloud.dataproc.v1beta2.RegexValidation.getDefaultInstance()) return this; + if (!other.regexes_.isEmpty()) { + if (regexes_.isEmpty()) { + regexes_ = other.regexes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegexesIsMutable(); + regexes_.addAll(other.regexes_); + } + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1beta2.RegexValidation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1beta2.RegexValidation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringList regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureRegexesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regexes_ = new com.google.protobuf.LazyStringArrayList(regexes_); + bitField0_ |= 0x00000001; + } + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ProtocolStringList + getRegexesList() { + return regexes_.getUnmodifiableView(); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public int getRegexesCount() { + return regexes_.size(); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public java.lang.String getRegexes(int index) { + return regexes_.get(index); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public com.google.protobuf.ByteString + getRegexesBytes(int index) { + return regexes_.getByteString(index); + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder setRegexes( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegexesIsMutable(); + regexes_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder addRegexes( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegexesIsMutable(); + regexes_.add(value); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder addAllRegexes( + java.lang.Iterable values) { + ensureRegexesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, regexes_); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder clearRegexes() { + regexes_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+     * Required. RE2 regular expressions used to validate the parameter's value.
+     * The value must match the regex in its entirety (substring
+     * matches are not sufficient).
+     * 
+ * + * repeated string regexes = 1; + */ + public Builder addRegexesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureRegexesIsMutable(); + regexes_.add(value); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.RegexValidation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.RegexValidation) + private static final com.google.cloud.dataproc.v1beta2.RegexValidation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.RegexValidation(); + } + + public static com.google.cloud.dataproc.v1beta2.RegexValidation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public RegexValidation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegexValidation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.RegexValidation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidationOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidationOrBuilder.java new file mode 100644 index 000000000000..7c663f0749be --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegexValidationOrBuilder.java @@ -0,0 +1,52 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +public interface RegexValidationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1beta2.RegexValidation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + java.util.List + getRegexesList(); + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + int getRegexesCount(); + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + java.lang.String getRegexes(int index); + /** + *
+   * Required. RE2 regular expressions used to validate the parameter's value.
+   * The value must match the regex in its entirety (substring
+   * matches are not sufficient).
+   * 
+ * + * repeated string regexes = 1; + */ + com.google.protobuf.ByteString + getRegexesBytes(int index); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java new file mode 100644 index 000000000000..e1d94de7958a --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java @@ -0,0 +1,1695 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +/** + *
+ * A configurable parameter that replaces one or more fields in the template.
+ * Parameterizable fields:
+ * - Labels
+ * - File uris
+ * - Job properties
+ * - Job arguments
+ * - Script variables
+ * - Main class (in HadoopJob and SparkJob)
+ * - Zone (in ClusterSelector)
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.TemplateParameter} + */ +public final class TemplateParameter extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.TemplateParameter) + TemplateParameterOrBuilder { +private static final long serialVersionUID = 0L; + // Use TemplateParameter.newBuilder() to construct. + private TemplateParameter(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private TemplateParameter() { + name_ = ""; + fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + description_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TemplateParameter( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000002; + } + fields_.add(s); + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + description_ = s; + break; + } + case 34: { + com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder subBuilder = null; + if (validation_ != null) { + subBuilder = validation_.toBuilder(); + } + validation_ = 
input.readMessage(com.google.cloud.dataproc.v1beta2.ParameterValidation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(validation_); + validation_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = fields_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.TemplateParameter.class, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Required.  Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Required.  Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FIELDS_FIELD_NUMBER = 2; + private com.google.protobuf.LazyStringList fields_; + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ProtocolStringList + getFieldsList() { + return fields_; + } + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public int getFieldsCount() { + return fields_.size(); + } + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public java.lang.String getFields(int index) { + return fields_.get(index); + } + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ByteString + getFieldsBytes(int index) { + return fields_.getByteString(index); + } + + public static final int DESCRIPTION_FIELD_NUMBER = 3; + private volatile java.lang.Object description_; + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VALIDATION_FIELD_NUMBER = 4; + private com.google.cloud.dataproc.v1beta2.ParameterValidation validation_; + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public boolean hasValidation() { + return validation_ != null; + } + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1beta2.ParameterValidation getValidation() { + return validation_ == null ? com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance() : validation_; + } + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder getValidationOrBuilder() { + return getValidation(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + for (int i = 0; i < fields_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, fields_.getRaw(i)); + } + if (!getDescriptionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, description_); + } + if (validation_ != null) { + output.writeMessage(4, getValidation()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + { + int dataSize = 0; + for (int i = 0; i < fields_.size(); i++) { + dataSize += computeStringSizeNoTag(fields_.getRaw(i)); + } + size += dataSize; + size += 1 * getFieldsList().size(); + } + if (!getDescriptionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, description_); + } + if (validation_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getValidation()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + 
if (!(obj instanceof com.google.cloud.dataproc.v1beta2.TemplateParameter)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1beta2.TemplateParameter other = (com.google.cloud.dataproc.v1beta2.TemplateParameter) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getFieldsList() + .equals(other.getFieldsList()); + result = result && getDescription() + .equals(other.getDescription()); + result = result && (hasValidation() == other.hasValidation()); + if (hasValidation()) { + result = result && getValidation() + .equals(other.getValidation()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (getFieldsCount() > 0) { + hash = (37 * hash) + FIELDS_FIELD_NUMBER; + hash = (53 * hash) + getFieldsList().hashCode(); + } + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + if (hasValidation()) { + hash = (37 * hash) + VALIDATION_FIELD_NUMBER; + hash = (53 * hash) + getValidation().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter 
parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, 
input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.TemplateParameter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1beta2.TemplateParameter prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A configurable parameter that replaces one or more fields in the template.
+   * Parameterizable fields:
+   * - Labels
+   * - File uris
+   * - Job properties
+   * - Job arguments
+   * - Script variables
+   * - Main class (in HadoopJob and SparkJob)
+   * - Zone (in ClusterSelector)
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.TemplateParameter} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.TemplateParameter) + com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.TemplateParameter.class, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1beta2.TemplateParameter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + description_ = ""; + + if (validationBuilder_ == null) { + validation_ = null; + } else { + validation_ = null; + validationBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.TemplateParameter getDefaultInstanceForType() { + return com.google.cloud.dataproc.v1beta2.TemplateParameter.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.TemplateParameter build() { + com.google.cloud.dataproc.v1beta2.TemplateParameter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.TemplateParameter buildPartial() { + com.google.cloud.dataproc.v1beta2.TemplateParameter result = new com.google.cloud.dataproc.v1beta2.TemplateParameter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = fields_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.fields_ = fields_; + result.description_ = description_; + if (validationBuilder_ == null) { + result.validation_ = validation_; + } else { + result.validation_ = validationBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( 
+ com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1beta2.TemplateParameter) { + return mergeFrom((com.google.cloud.dataproc.v1beta2.TemplateParameter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.TemplateParameter other) { + if (other == com.google.cloud.dataproc.v1beta2.TemplateParameter.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.fields_.isEmpty()) { + if (fields_.isEmpty()) { + fields_ = other.fields_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFieldsIsMutable(); + fields_.addAll(other.fields_); + } + onChanged(); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + onChanged(); + } + if (other.hasValidation()) { + mergeValidation(other.getValidation()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1beta2.TemplateParameter parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1beta2.TemplateParameter) 
e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Required.  Parameter name.
+     * The parameter name is used as the key, and paired with the
+     * parameter value, which are passed to the template when the template
+     * is instantiated.
+     * The name must contain only capital letters (A-Z), numbers (0-9), and
+     * underscores (_), and must not start with a number. The maximum length is
+     * 40 characters.
+     * 
+ * + * string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureFieldsIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + fields_ = new com.google.protobuf.LazyStringArrayList(fields_); + bitField0_ |= 0x00000002; + } + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ProtocolStringList + getFieldsList() { + return fields_.getUnmodifiableView(); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public int getFieldsCount() { + return fields_.size(); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public java.lang.String getFields(int index) { + return fields_.get(index); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public com.google.protobuf.ByteString + getFieldsBytes(int index) { + return fields_.getByteString(index); + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder setFields( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFieldsIsMutable(); + fields_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder addFields( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureFieldsIsMutable(); + fields_.add(value); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder addAllFields( + java.lang.Iterable values) { + ensureFieldsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, fields_); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder clearFields() { + fields_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+     * Required. Paths to all fields that the parameter replaces.
+     * A field is allowed to appear in at most one parameter's list of field paths.
+     * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+     * For example, a field path that references the zone field of a workflow
+     * template's cluster selector would be specified as
+     * `placement.clusterSelector.zone`.
+     * Also, field paths can reference fields using the following syntax:
+     * * Values in maps can be referenced by key:
+     *     * labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * placement.managedCluster.labels['key']
+     *     * placement.clusterSelector.clusterLabels['key']
+     *     * jobs['step-id'].labels['key']
+     * * Jobs in the jobs list can be referenced by step-id:
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * jobs['step-id'].hiveJob.queryFileUri
+     *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+     *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+     *     * jobs['step-id'].hadoopJob.archiveUris[0]
+     *     * jobs['step-id'].hadoopJob.fileUris[0]
+     *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+     * * Items in repeated fields can be referenced by a zero-based index:
+     *     * jobs['step-id'].sparkJob.args[0]
+     * * Other examples:
+     *     * jobs['step-id'].hadoopJob.properties['key']
+     *     * jobs['step-id'].hadoopJob.args[0]
+     *     * jobs['step-id'].hiveJob.scriptVariables['key']
+     *     * jobs['step-id'].hadoopJob.mainJarFileUri
+     *     * placement.clusterSelector.zone
+     * It may not be possible to parameterize maps and repeated fields in their
+     * entirety since only individual map values and individual items in repeated
+     * fields can be referenced. For example, the following field paths are
+     * invalid:
+     * - placement.clusterSelector.clusterLabels
+     * - jobs['step-id'].sparkJob.args
+     * 
+ * + * repeated string fields = 2; + */ + public Builder addFieldsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureFieldsIsMutable(); + fields_.add(value); + onChanged(); + return this; + } + + private java.lang.Object description_ = ""; + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + description_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public Builder clearDescription() { + + description_ = getDefaultInstance().getDescription(); + onChanged(); + return this; + } + /** + *
+     * Optional. Brief description of the parameter.
+     * Must not exceed 1024 characters.
+     * 
+ * + * string description = 3; + */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + description_ = value; + onChanged(); + return this; + } + + private com.google.cloud.dataproc.v1beta2.ParameterValidation validation_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.ParameterValidation, com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder, com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder> validationBuilder_; + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public boolean hasValidation() { + return validationBuilder_ != null || validation_ != null; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1beta2.ParameterValidation getValidation() { + if (validationBuilder_ == null) { + return validation_ == null ? com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance() : validation_; + } else { + return validationBuilder_.getMessage(); + } + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public Builder setValidation(com.google.cloud.dataproc.v1beta2.ParameterValidation value) { + if (validationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + validation_ = value; + onChanged(); + } else { + validationBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public Builder setValidation( + com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder builderForValue) { + if (validationBuilder_ == null) { + validation_ = builderForValue.build(); + onChanged(); + } else { + validationBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public Builder mergeValidation(com.google.cloud.dataproc.v1beta2.ParameterValidation value) { + if (validationBuilder_ == null) { + if (validation_ != null) { + validation_ = + com.google.cloud.dataproc.v1beta2.ParameterValidation.newBuilder(validation_).mergeFrom(value).buildPartial(); + } else { + validation_ = value; + } + onChanged(); + } else { + validationBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public Builder clearValidation() { + if (validationBuilder_ == null) { + validation_ = null; + onChanged(); + } else { + validation_ = null; + validationBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder getValidationBuilder() { + + onChanged(); + return getValidationFieldBuilder().getBuilder(); + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + public com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder getValidationOrBuilder() { + if (validationBuilder_ != null) { + return validationBuilder_.getMessageOrBuilder(); + } else { + return validation_ == null ? + com.google.cloud.dataproc.v1beta2.ParameterValidation.getDefaultInstance() : validation_; + } + } + /** + *
+     * Optional. Validation rules to be applied to this parameter's value.
+     * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.ParameterValidation, com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder, com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder> + getValidationFieldBuilder() { + if (validationBuilder_ == null) { + validationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.ParameterValidation, com.google.cloud.dataproc.v1beta2.ParameterValidation.Builder, com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder>( + getValidation(), + getParentForChildren(), + isClean()); + validation_ = null; + } + return validationBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.TemplateParameter) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.TemplateParameter) + private static final com.google.cloud.dataproc.v1beta2.TemplateParameter DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.TemplateParameter(); + } + + public static com.google.cloud.dataproc.v1beta2.TemplateParameter getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TemplateParameter parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
TemplateParameter(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.TemplateParameter getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java new file mode 100644 index 000000000000..cdad04647b02 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java @@ -0,0 +1,255 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +public interface TemplateParameterOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1beta2.TemplateParameter) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Required. Parameter name.
+   * The parameter name is used as the key, and paired with the
+   * parameter value, which are passed to the template when the template
+   * is instantiated.
+   * The name must contain only capital letters (A-Z), numbers (0-9), and
+   * underscores (_), and must not start with a number. The maximum length is
+   * 40 characters.
+   * 
+ * + * string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + java.util.List + getFieldsList(); + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + int getFieldsCount(); + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + java.lang.String getFields(int index); + /** + *
+   * Required. Paths to all fields that the parameter replaces.
+   * A field is allowed to appear in at most one parameter's list of field paths.
+   * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+   * For example, a field path that references the zone field of a workflow
+   * template's cluster selector would be specified as
+   * `placement.clusterSelector.zone`.
+   * Also, field paths can reference fields using the following syntax:
+   * * Values in maps can be referenced by key:
+   *     * labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * placement.managedCluster.labels['key']
+   *     * placement.clusterSelector.clusterLabels['key']
+   *     * jobs['step-id'].labels['key']
+   * * Jobs in the jobs list can be referenced by step-id:
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * jobs['step-id'].hiveJob.queryFileUri
+   *     * jobs['step-id'].pySparkJob.mainPythonFileUri
+   *     * jobs['step-id'].hadoopJob.jarFileUris[0]
+   *     * jobs['step-id'].hadoopJob.archiveUris[0]
+   *     * jobs['step-id'].hadoopJob.fileUris[0]
+   *     * jobs['step-id'].pySparkJob.pythonFileUris[0]
+   * * Items in repeated fields can be referenced by a zero-based index:
+   *     * jobs['step-id'].sparkJob.args[0]
+   * * Other examples:
+   *     * jobs['step-id'].hadoopJob.properties['key']
+   *     * jobs['step-id'].hadoopJob.args[0]
+   *     * jobs['step-id'].hiveJob.scriptVariables['key']
+   *     * jobs['step-id'].hadoopJob.mainJarFileUri
+   *     * placement.clusterSelector.zone
+   * It may not be possible to parameterize maps and repeated fields in their
+   * entirety since only individual map values and individual items in repeated
+   * fields can be referenced. For example, the following field paths are
+   * invalid:
+   * - placement.clusterSelector.clusterLabels
+   * - jobs['step-id'].sparkJob.args
+   * 
+ * + * repeated string fields = 2; + */ + com.google.protobuf.ByteString + getFieldsBytes(int index); + + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + java.lang.String getDescription(); + /** + *
+   * Optional. Brief description of the parameter.
+   * Must not exceed 1024 characters.
+   * 
+ * + * string description = 3; + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + boolean hasValidation(); + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + com.google.cloud.dataproc.v1beta2.ParameterValidation getValidation(); + /** + *
+   * Optional. Validation rules to be applied to this parameter's value.
+   * 
+ * + * .google.cloud.dataproc.v1beta2.ParameterValidation validation = 4; + */ + com.google.cloud.dataproc.v1beta2.ParameterValidationOrBuilder getValidationOrBuilder(); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidation.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidation.java new file mode 100644 index 000000000000..af5eedbdfb4e --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidation.java @@ -0,0 +1,646 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +/** + *
+ * Validation based on a list of allowed values.
+ * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.ValueValidation} + */ +public final class ValueValidation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1beta2.ValueValidation) + ValueValidationOrBuilder { +private static final long serialVersionUID = 0L; + // Use ValueValidation.newBuilder() to construct. + private ValueValidation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ValueValidation() { + values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ValueValidation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + values_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + values_.add(s); + break; + } + default: { + if (!parseUnknownFieldProto3( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + values_ = values_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ValueValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ValueValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.ValueValidation.class, com.google.cloud.dataproc.v1beta2.ValueValidation.Builder.class); + } + + public static final int VALUES_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList values_; + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ProtocolStringList + getValuesList() { + return values_; + } + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public int getValuesCount() { + return values_.size(); + } + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public java.lang.String getValues(int index) { + return values_.get(index); + } + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ByteString + getValuesBytes(int index) { + return values_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < values_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, values_.getRaw(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < values_.size(); i++) { + dataSize += computeStringSizeNoTag(values_.getRaw(i)); + } + size += dataSize; + size += 1 * getValuesList().size(); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.dataproc.v1beta2.ValueValidation)) { + return super.equals(obj); + } + com.google.cloud.dataproc.v1beta2.ValueValidation other = (com.google.cloud.dataproc.v1beta2.ValueValidation) obj; + + boolean result = true; + result = result && getValuesList() + .equals(other.getValuesList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getValuesCount() > 0) { + hash = (37 * hash) + VALUES_FIELD_NUMBER; + hash = (53 * hash) + getValuesList().hashCode(); + } + hash = (29 * hash) + 
unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static com.google.cloud.dataproc.v1beta2.ValueValidation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(com.google.cloud.dataproc.v1beta2.ValueValidation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Validation based on a list of allowed values.
+   * 
+ * + * Protobuf type {@code google.cloud.dataproc.v1beta2.ValueValidation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1beta2.ValueValidation) + com.google.cloud.dataproc.v1beta2.ValueValidationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ValueValidation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ValueValidation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.dataproc.v1beta2.ValueValidation.class, com.google.cloud.dataproc.v1beta2.ValueValidation.Builder.class); + } + + // Construct using com.google.cloud.dataproc.v1beta2.ValueValidation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return com.google.cloud.dataproc.v1beta2.WorkflowTemplatesProto.internal_static_google_cloud_dataproc_v1beta2_ValueValidation_descriptor; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ValueValidation getDefaultInstanceForType() { + return 
com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ValueValidation build() { + com.google.cloud.dataproc.v1beta2.ValueValidation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ValueValidation buildPartial() { + com.google.cloud.dataproc.v1beta2.ValueValidation result = new com.google.cloud.dataproc.v1beta2.ValueValidation(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + values_ = values_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.values_ = values_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return (Builder) super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return (Builder) super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.dataproc.v1beta2.ValueValidation) { + 
return mergeFrom((com.google.cloud.dataproc.v1beta2.ValueValidation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.ValueValidation other) { + if (other == com.google.cloud.dataproc.v1beta2.ValueValidation.getDefaultInstance()) return this; + if (!other.values_.isEmpty()) { + if (values_.isEmpty()) { + values_ = other.values_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureValuesIsMutable(); + values_.addAll(other.values_); + } + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.dataproc.v1beta2.ValueValidation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (com.google.cloud.dataproc.v1beta2.ValueValidation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringList values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureValuesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + values_ = new com.google.protobuf.LazyStringArrayList(values_); + bitField0_ |= 0x00000001; + } + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ProtocolStringList + getValuesList() { + return values_.getUnmodifiableView(); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public int getValuesCount() { + return values_.size(); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public java.lang.String getValues(int index) { + return values_.get(index); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public com.google.protobuf.ByteString + getValuesBytes(int index) { + return values_.getByteString(index); + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder setValues( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureValuesIsMutable(); + values_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder addValues( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureValuesIsMutable(); + values_.add(value); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder addAllValues( + java.lang.Iterable values) { + ensureValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, values_); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder clearValues() { + values_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+     * Required. List of allowed values for the parameter.
+     * 
+ * + * repeated string values = 1; + */ + public Builder addValuesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureValuesIsMutable(); + values_.add(value); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFieldsProto3(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1beta2.ValueValidation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ValueValidation) + private static final com.google.cloud.dataproc.v1beta2.ValueValidation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1beta2.ValueValidation(); + } + + public static com.google.cloud.dataproc.v1beta2.ValueValidation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ValueValidation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ValueValidation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.dataproc.v1beta2.ValueValidation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidationOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidationOrBuilder.java new file mode 100644 index 000000000000..d482f4faa2c4 --- /dev/null +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ValueValidationOrBuilder.java @@ -0,0 +1,44 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package com.google.cloud.dataproc.v1beta2; + +public interface ValueValidationOrBuilder extends + // @@protoc_insertion_point(interface_extends:google.cloud.dataproc.v1beta2.ValueValidation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + java.util.List + getValuesList(); + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + int getValuesCount(); + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + java.lang.String getValues(int index); + /** + *
+   * Required. List of allowed values for the parameter.
+   * 
+ * + * repeated string values = 1; + */ + com.google.protobuf.ByteString + getValuesBytes(int index); +} diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java index 21dce0c8f8dc..5a0c60ab2afa 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java @@ -24,6 +24,7 @@ private WorkflowMetadata() { version_ = 0; state_ = 0; clusterName_ = ""; + clusterUuid_ = ""; } @java.lang.Override @@ -125,6 +126,38 @@ private WorkflowMetadata( parameters__.getKey(), parameters__.getValue()); break; } + case 74: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (startTime_ != null) { + subBuilder = startTime_.toBuilder(); + } + startTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(startTime_); + startTime_ = subBuilder.buildPartial(); + } + + break; + } + case 82: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (endTime_ != null) { + subBuilder = endTime_.toBuilder(); + } + endTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(endTime_); + endTime_ = subBuilder.buildPartial(); + } + + break; + } + case 90: { + java.lang.String s = input.readStringRequireUtf8(); + + clusterUuid_ = s; + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -506,7 +539,7 @@ public com.google.cloud.dataproc.v1beta2.WorkflowMetadata.State getState() { private volatile java.lang.Object clusterName_; /** *
-   * Output only. The name of the managed cluster.
+   * Output only. The name of the target cluster.
    * 
* * string cluster_name = 7; @@ -525,7 +558,7 @@ public java.lang.String getClusterName() { } /** *
-   * Output only. The name of the managed cluster.
+   * Output only. The name of the target cluster.
    * 
* * string cluster_name = 7; @@ -636,6 +669,114 @@ public java.lang.String getParametersOrThrow( return map.get(key); } + public static final int START_TIME_FIELD_NUMBER = 9; + private com.google.protobuf.Timestamp startTime_; + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public boolean hasStartTime() { + return startTime_ != null; + } + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.Timestamp getStartTime() { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + return getStartTime(); + } + + public static final int END_TIME_FIELD_NUMBER = 10; + private com.google.protobuf.Timestamp endTime_; + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public boolean hasEndTime() { + return endTime_ != null; + } + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.Timestamp getEndTime() { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + return getEndTime(); + } + + public static final int CLUSTER_UUID_FIELD_NUMBER = 11; + private volatile java.lang.Object clusterUuid_; + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + public java.lang.String getClusterUuid() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterUuid_ = s; + return s; + } + } + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + public com.google.protobuf.ByteString + getClusterUuidBytes() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -677,6 +818,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) internalGetParameters(), ParametersDefaultEntryHolder.defaultEntry, 8); + if (startTime_ != null) { + output.writeMessage(9, getStartTime()); + } + if (endTime_ != null) { + output.writeMessage(10, getEndTime()); + } + if (!getClusterUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 11, clusterUuid_); + } unknownFields.writeTo(output); } @@ -722,6 +872,17 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(8, parameters__); } + if (startTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(9, getStartTime()); + } + if (endTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, getEndTime()); + } + if (!getClusterUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, clusterUuid_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -762,6 +923,18 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getClusterName()); result = result && internalGetParameters().equals( other.internalGetParameters()); + result = result && (hasStartTime() == other.hasStartTime()); + if (hasStartTime()) { + result = result && getStartTime() + .equals(other.getStartTime()); + } + result = result && (hasEndTime() == other.hasEndTime()); + if (hasEndTime()) { + result = result && 
getEndTime() + .equals(other.getEndTime()); + } + result = result && getClusterUuid() + .equals(other.getClusterUuid()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -797,6 +970,16 @@ public int hashCode() { hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + internalGetParameters().hashCode(); } + if (hasStartTime()) { + hash = (37 * hash) + START_TIME_FIELD_NUMBER; + hash = (53 * hash) + getStartTime().hashCode(); + } + if (hasEndTime()) { + hash = (37 * hash) + END_TIME_FIELD_NUMBER; + hash = (53 * hash) + getEndTime().hashCode(); + } + hash = (37 * hash) + CLUSTER_UUID_FIELD_NUMBER; + hash = (53 * hash) + getClusterUuid().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -983,6 +1166,20 @@ public Builder clear() { clusterName_ = ""; internalGetMutableParameters().clear(); + if (startTimeBuilder_ == null) { + startTime_ = null; + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + if (endTimeBuilder_ == null) { + endTime_ = null; + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + clusterUuid_ = ""; + return this; } @@ -1032,6 +1229,17 @@ public com.google.cloud.dataproc.v1beta2.WorkflowMetadata buildPartial() { result.clusterName_ = clusterName_; result.parameters_ = internalGetParameters(); result.parameters_.makeImmutable(); + if (startTimeBuilder_ == null) { + result.startTime_ = startTime_; + } else { + result.startTime_ = startTimeBuilder_.build(); + } + if (endTimeBuilder_ == null) { + result.endTime_ = endTime_; + } else { + result.endTime_ = endTimeBuilder_.build(); + } + result.clusterUuid_ = clusterUuid_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1106,6 +1314,16 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.WorkflowMetadata othe } internalGetMutableParameters().mergeFrom( other.internalGetParameters()); + if (other.hasStartTime()) { + mergeStartTime(other.getStartTime()); + } + if 
(other.hasEndTime()) { + mergeEndTime(other.getEndTime()); + } + if (!other.getClusterUuid().isEmpty()) { + clusterUuid_ = other.clusterUuid_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1793,7 +2011,7 @@ public Builder clearState() { private java.lang.Object clusterName_ = ""; /** *
-     * Output only. The name of the managed cluster.
+     * Output only. The name of the target cluster.
      * 
* * string cluster_name = 7; @@ -1812,7 +2030,7 @@ public java.lang.String getClusterName() { } /** *
-     * Output only. The name of the managed cluster.
+     * Output only. The name of the target cluster.
      * 
* * string cluster_name = 7; @@ -1832,7 +2050,7 @@ public java.lang.String getClusterName() { } /** *
-     * Output only. The name of the managed cluster.
+     * Output only. The name of the target cluster.
      * 
* * string cluster_name = 7; @@ -1849,7 +2067,7 @@ public Builder setClusterName( } /** *
-     * Output only. The name of the managed cluster.
+     * Output only. The name of the target cluster.
      * 
* * string cluster_name = 7; @@ -1862,7 +2080,7 @@ public Builder clearClusterName() { } /** *
-     * Output only. The name of the managed cluster.
+     * Output only. The name of the target cluster.
      * 
* * string cluster_name = 7; @@ -2029,6 +2247,401 @@ public Builder putAllParameters( .putAll(values); return this; } + + private com.google.protobuf.Timestamp startTime_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public boolean hasStartTime() { + return startTimeBuilder_ != null || startTime_ != null; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.Timestamp getStartTime() { + if (startTimeBuilder_ == null) { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } else { + return startTimeBuilder_.getMessage(); + } + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder setStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + startTime_ = value; + onChanged(); + } else { + startTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder setStartTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (startTimeBuilder_ == null) { + startTime_ = builderForValue.build(); + onChanged(); + } else { + startTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder mergeStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (startTime_ != null) { + startTime_ = + com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial(); + } else { + startTime_ = value; + } + onChanged(); + } else { + startTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public Builder clearStartTime() { + if (startTimeBuilder_ == null) { + startTime_ = null; + onChanged(); + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { + + onChanged(); + return getStartTimeFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + if (startTimeBuilder_ != null) { + return startTimeBuilder_.getMessageOrBuilder(); + } else { + return startTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + } + /** + *
+     * Output only. Workflow start time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getStartTimeFieldBuilder() { + if (startTimeBuilder_ == null) { + startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getStartTime(), + getParentForChildren(), + isClean()); + startTime_ = null; + } + return startTimeBuilder_; + } + + private com.google.protobuf.Timestamp endTime_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> endTimeBuilder_; + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public boolean hasEndTime() { + return endTimeBuilder_ != null || endTime_ != null; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.Timestamp getEndTime() { + if (endTimeBuilder_ == null) { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } else { + return endTimeBuilder_.getMessage(); + } + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder setEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + endTime_ = value; + onChanged(); + } else { + endTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder setEndTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (endTimeBuilder_ == null) { + endTime_ = builderForValue.build(); + onChanged(); + } else { + endTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder mergeEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (endTime_ != null) { + endTime_ = + com.google.protobuf.Timestamp.newBuilder(endTime_).mergeFrom(value).buildPartial(); + } else { + endTime_ = value; + } + onChanged(); + } else { + endTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public Builder clearEndTime() { + if (endTimeBuilder_ == null) { + endTime_ = null; + onChanged(); + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + + return this; + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { + + onChanged(); + return getEndTimeFieldBuilder().getBuilder(); + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + if (endTimeBuilder_ != null) { + return endTimeBuilder_.getMessageOrBuilder(); + } else { + return endTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + } + /** + *
+     * Output only. Workflow end time.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getEndTimeFieldBuilder() { + if (endTimeBuilder_ == null) { + endTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getEndTime(), + getParentForChildren(), + isClean()); + endTime_ = null; + } + return endTimeBuilder_; + } + + private java.lang.Object clusterUuid_ = ""; + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public java.lang.String getClusterUuid() { + java.lang.Object ref = clusterUuid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + clusterUuid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public com.google.protobuf.ByteString + getClusterUuidBytes() { + java.lang.Object ref = clusterUuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + clusterUuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public Builder setClusterUuid( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + clusterUuid_ = value; + onChanged(); + return this; + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public Builder clearClusterUuid() { + + clusterUuid_ = getDefaultInstance().getClusterUuid(); + onChanged(); + return this; + } + /** + *
+     * Output only. The UUID of target cluster.
+     * 
+ * + * string cluster_uuid = 11; + */ + public Builder setClusterUuidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + clusterUuid_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java index 634ee85eec8c..331eb6deb572 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java @@ -129,7 +129,7 @@ public interface WorkflowMetadataOrBuilder extends /** *
-   * Output only. The name of the managed cluster.
+   * Output only. The name of the target cluster.
    * 
* * string cluster_name = 7; @@ -137,7 +137,7 @@ public interface WorkflowMetadataOrBuilder extends java.lang.String getClusterName(); /** *
-   * Output only. The name of the managed cluster.
+   * Output only. The name of the target cluster.
    * 
* * string cluster_name = 7; @@ -198,4 +198,72 @@ java.lang.String getParametersOrDefault( java.lang.String getParametersOrThrow( java.lang.String key); + + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + boolean hasStartTime(); + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + com.google.protobuf.Timestamp getStartTime(); + /** + *
+   * Output only. Workflow start time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 9; + */ + com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder(); + + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + boolean hasEndTime(); + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + com.google.protobuf.Timestamp getEndTime(); + /** + *
+   * Output only. Workflow end time.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 10; + */ + com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder(); + + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + java.lang.String getClusterUuid(); + /** + *
+   * Output only. The UUID of target cluster.
+   * 
+ * + * string cluster_uuid = 11; + */ + com.google.protobuf.ByteString + getClusterUuidBytes(); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java index 896bed458d97..f56c34c54a8f 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java @@ -24,6 +24,7 @@ private WorkflowTemplate() { name_ = ""; version_ = 0; jobs_ = java.util.Collections.emptyList(); + parameters_ = java.util.Collections.emptyList(); } @java.lang.Override @@ -128,6 +129,15 @@ private WorkflowTemplate( input.readMessage(com.google.cloud.dataproc.v1beta2.OrderedJob.parser(), extensionRegistry)); break; } + case 74: { + if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000100; + } + parameters_.add( + input.readMessage(com.google.cloud.dataproc.v1beta2.TemplateParameter.parser(), extensionRegistry)); + break; + } default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { @@ -146,6 +156,9 @@ private WorkflowTemplate( if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { jobs_ = java.util.Collections.unmodifiableList(jobs_); } + if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = java.util.Collections.unmodifiableList(parameters_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -568,6 +581,71 @@ public com.google.cloud.dataproc.v1beta2.OrderedJobOrBuilder getJobsOrBuilder( return jobs_.get(index); } + public static final int PARAMETERS_FIELD_NUMBER = 9; + private java.util.List parameters_; + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public java.util.List getParametersList() { + return parameters_; + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public java.util.List + getParametersOrBuilderList() { + return parameters_; + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public int getParametersCount() { + return parameters_.size(); + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameter getParameters(int index) { + return parameters_.get(index); + } + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder getParametersOrBuilder( + int index) { + return parameters_.get(index); + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -609,6 +687,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < jobs_.size(); i++) { output.writeMessage(8, jobs_.get(i)); } + for (int i = 0; i < parameters_.size(); i++) { + output.writeMessage(9, parameters_.get(i)); + } unknownFields.writeTo(output); } @@ -654,6 +735,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(8, jobs_.get(i)); } + for (int i = 0; i < parameters_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(9, parameters_.get(i)); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -695,6 +780,8 @@ public boolean equals(final java.lang.Object obj) { } result = result && getJobsList() .equals(other.getJobsList()); + result = result && getParametersList() + .equals(other.getParametersList()); result = result && unknownFields.equals(other.unknownFields); return result; } @@ -732,6 +819,10 @@ public int hashCode() { hash = (37 * hash) + JOBS_FIELD_NUMBER; hash = (53 * hash) + getJobsList().hashCode(); } + if (getParametersCount() > 0) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + getParametersList().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -887,6 +978,7 @@ private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getJobsFieldBuilder(); + getParametersFieldBuilder(); } } @java.lang.Override @@ -923,6 +1015,12 @@ public Builder clear() { } else { jobsBuilder_.clear(); } + if (parametersBuilder_ == null) { + parameters_ 
= java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000100); + } else { + parametersBuilder_.clear(); + } return this; } @@ -980,6 +1078,15 @@ public com.google.cloud.dataproc.v1beta2.WorkflowTemplate buildPartial() { } else { result.jobs_ = jobsBuilder_.build(); } + if (parametersBuilder_ == null) { + if (((bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = java.util.Collections.unmodifiableList(parameters_); + bitField0_ = (bitField0_ & ~0x00000100); + } + result.parameters_ = parameters_; + } else { + result.parameters_ = parametersBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1077,6 +1184,32 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.WorkflowTemplate othe } } } + if (parametersBuilder_ == null) { + if (!other.parameters_.isEmpty()) { + if (parameters_.isEmpty()) { + parameters_ = other.parameters_; + bitField0_ = (bitField0_ & ~0x00000100); + } else { + ensureParametersIsMutable(); + parameters_.addAll(other.parameters_); + } + onChanged(); + } + } else { + if (!other.parameters_.isEmpty()) { + if (parametersBuilder_.isEmpty()) { + parametersBuilder_.dispose(); + parametersBuilder_ = null; + parameters_ = other.parameters_; + bitField0_ = (bitField0_ & ~0x00000100); + parametersBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getParametersFieldBuilder() : null; + } else { + parametersBuilder_.addAllMessages(other.parameters_); + } + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2346,6 +2479,354 @@ public com.google.cloud.dataproc.v1beta2.OrderedJob.Builder addJobsBuilder( } return jobsBuilder_; } + + private java.util.List parameters_ = + java.util.Collections.emptyList(); + private void ensureParametersIsMutable() { + if (!((bitField0_ & 0x00000100) == 0x00000100)) { + parameters_ = new java.util.ArrayList(parameters_); + bitField0_ |= 0x00000100; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.TemplateParameter, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder, com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder> parametersBuilder_; + + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public java.util.List getParametersList() { + if (parametersBuilder_ == null) { + return java.util.Collections.unmodifiableList(parameters_); + } else { + return parametersBuilder_.getMessageList(); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public int getParametersCount() { + if (parametersBuilder_ == null) { + return parameters_.size(); + } else { + return parametersBuilder_.getCount(); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameter getParameters(int index) { + if (parametersBuilder_ == null) { + return parameters_.get(index); + } else { + return parametersBuilder_.getMessage(index); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder setParameters( + int index, com.google.cloud.dataproc.v1beta2.TemplateParameter value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParametersIsMutable(); + parameters_.set(index, value); + onChanged(); + } else { + parametersBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder setParameters( + int index, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder builderForValue) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.set(index, builderForValue.build()); + onChanged(); + } else { + parametersBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder addParameters(com.google.cloud.dataproc.v1beta2.TemplateParameter value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParametersIsMutable(); + parameters_.add(value); + onChanged(); + } else { + parametersBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder addParameters( + int index, com.google.cloud.dataproc.v1beta2.TemplateParameter value) { + if (parametersBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParametersIsMutable(); + parameters_.add(index, value); + onChanged(); + } else { + parametersBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder addParameters( + com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder builderForValue) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.add(builderForValue.build()); + onChanged(); + } else { + parametersBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder addParameters( + int index, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder builderForValue) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.add(index, builderForValue.build()); + onChanged(); + } else { + parametersBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder addAllParameters( + java.lang.Iterable values) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, parameters_); + onChanged(); + } else { + parametersBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder clearParameters() { + if (parametersBuilder_ == null) { + parameters_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000100); + onChanged(); + } else { + parametersBuilder_.clear(); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public Builder removeParameters(int index) { + if (parametersBuilder_ == null) { + ensureParametersIsMutable(); + parameters_.remove(index); + onChanged(); + } else { + parametersBuilder_.remove(index); + } + return this; + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder getParametersBuilder( + int index) { + return getParametersFieldBuilder().getBuilder(index); + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder getParametersOrBuilder( + int index) { + if (parametersBuilder_ == null) { + return parameters_.get(index); } else { + return parametersBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public java.util.List + getParametersOrBuilderList() { + if (parametersBuilder_ != null) { + return parametersBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(parameters_); + } + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder addParametersBuilder() { + return getParametersFieldBuilder().addBuilder( + com.google.cloud.dataproc.v1beta2.TemplateParameter.getDefaultInstance()); + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder addParametersBuilder( + int index) { + return getParametersFieldBuilder().addBuilder( + index, com.google.cloud.dataproc.v1beta2.TemplateParameter.getDefaultInstance()); + } + /** + *
+     * Optional. Template parameters whose values are substituted into the
+     * template. Values for parameters must be provided when the template is
+     * instantiated.
+     * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + public java.util.List + getParametersBuilderList() { + return getParametersFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.TemplateParameter, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder, com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder> + getParametersFieldBuilder() { + if (parametersBuilder_ == null) { + parametersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.dataproc.v1beta2.TemplateParameter, com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder, com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder>( + parameters_, + ((bitField0_ & 0x00000100) == 0x00000100), + getParentForChildren(), + isClean()); + parameters_ = null; + } + return parametersBuilder_; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java index 894e69ae91bc..2bfcfd3bdf9e 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java @@ -281,4 +281,58 @@ java.lang.String getLabelsOrThrow( */ com.google.cloud.dataproc.v1beta2.OrderedJobOrBuilder getJobsOrBuilder( int index); + + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + java.util.List + getParametersList(); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + com.google.cloud.dataproc.v1beta2.TemplateParameter getParameters(int index); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + int getParametersCount(); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + java.util.List + getParametersOrBuilderList(); + /** + *
+   * Optional. Template parameters whose values are substituted into the
+   * template. Values for parameters must be provided when the template is
+   * instantiated.
+   * 
+ * + * repeated .google.cloud.dataproc.v1beta2.TemplateParameter parameters = 9; + */ + com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder getParametersOrBuilder( + int index); } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java index ffe4b96b2118..95b028266f48 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java @@ -59,6 +59,26 @@ public static void registerAllExtensions( static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_dataproc_v1beta2_OrderedJob_LabelsEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1beta2_RegexValidation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1beta2_RegexValidation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_google_cloud_dataproc_v1beta2_ValueValidation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1beta2_ValueValidation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_descriptor; static final @@ -99,6 +119,11 @@ public static void registerAllExtensions( static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_dataproc_v1beta2_InstantiateInlineWorkflowTemplateRequest_descriptor; static final @@ -140,7 +165,7 @@ public static void registerAllExtensions( "oto\032(google/cloud/dataproc/v1beta2/jobs." 
+ "proto\032#google/longrunning/operations.pro" + "to\032\033google/protobuf/empty.proto\032\037google/" + - "protobuf/timestamp.proto\"\241\003\n\020WorkflowTem" + + "protobuf/timestamp.proto\"\347\003\n\020WorkflowTem" + "plate\022\n\n\002id\030\002 \001(\t\022\014\n\004name\030\001 \001(\t\022\017\n\007versi" + "on\030\003 \001(\005\022/\n\013create_time\030\004 \001(\0132\032.google.p" + "rotobuf.Timestamp\022/\n\013update_time\030\005 \001(\0132\032" + @@ -149,138 +174,157 @@ public static void registerAllExtensions( "lowTemplate.LabelsEntry\022K\n\tplacement\030\007 \001" + "(\01328.google.cloud.dataproc.v1beta2.Workf" + "lowTemplatePlacement\0227\n\004jobs\030\010 \003(\0132).goo" + - "gle.cloud.dataproc.v1beta2.OrderedJob\032-\n" + - "\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t" + - ":\0028\001\"\276\001\n\031WorkflowTemplatePlacement\022H\n\017ma" + - "naged_cluster\030\001 \001(\0132-.google.cloud.datap" + - "roc.v1beta2.ManagedClusterH\000\022J\n\020cluster_" + - "selector\030\002 \001(\0132..google.cloud.dataproc.v" + - "1beta2.ClusterSelectorH\000B\013\n\tplacement\"\336\001" + - "\n\016ManagedCluster\022\024\n\014cluster_name\030\002 \001(\t\022<" + - "\n\006config\030\003 \001(\0132,.google.cloud.dataproc.v" + - "1beta2.ClusterConfig\022I\n\006labels\030\004 \003(\01329.g" + - "oogle.cloud.dataproc.v1beta2.ManagedClus" + - "ter.LabelsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 " + - "\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\260\001\n\017ClusterSelect" + - "or\022\014\n\004zone\030\001 \001(\t\022Y\n\016cluster_labels\030\002 \003(\013" + - "2A.google.cloud.dataproc.v1beta2.Cluster" + - "Selector.ClusterLabelsEntry\0324\n\022ClusterLa" + - "belsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028" + - "\001\"\373\004\n\nOrderedJob\022\017\n\007step_id\030\001 \001(\t\022>\n\nhad" + - "oop_job\030\002 \001(\0132(.google.cloud.dataproc.v1" + - 
"beta2.HadoopJobH\000\022<\n\tspark_job\030\003 \001(\0132\'.g" + - "oogle.cloud.dataproc.v1beta2.SparkJobH\000\022" + - "@\n\013pyspark_job\030\004 \001(\0132).google.cloud.data" + - "proc.v1beta2.PySparkJobH\000\022:\n\010hive_job\030\005 " + - "\001(\0132&.google.cloud.dataproc.v1beta2.Hive" + - "JobH\000\0228\n\007pig_job\030\006 \001(\0132%.google.cloud.da" + - "taproc.v1beta2.PigJobH\000\022C\n\rspark_sql_job" + - "\030\007 \001(\0132*.google.cloud.dataproc.v1beta2.S" + - "parkSqlJobH\000\022E\n\006labels\030\010 \003(\01325.google.cl" + - "oud.dataproc.v1beta2.OrderedJob.LabelsEn" + - "try\022@\n\nscheduling\030\t \001(\0132,.google.cloud.d" + - "ataproc.v1beta2.JobScheduling\022\035\n\025prerequ" + - "isite_step_ids\030\n \003(\t\032-\n\013LabelsEntry\022\013\n\003k" + - "ey\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\n\n\010job_type\"" + - "\242\004\n\020WorkflowMetadata\022\020\n\010template\030\001 \001(\t\022\017" + - "\n\007version\030\002 \001(\005\022G\n\016create_cluster\030\003 \001(\0132" + - "/.google.cloud.dataproc.v1beta2.ClusterO" + - "peration\022;\n\005graph\030\004 \001(\0132,.google.cloud.d" + - "ataproc.v1beta2.WorkflowGraph\022G\n\016delete_" + - "cluster\030\005 \001(\0132/.google.cloud.dataproc.v1" + - "beta2.ClusterOperation\022D\n\005state\030\006 \001(\01625." 
+ - "google.cloud.dataproc.v1beta2.WorkflowMe" + - "tadata.State\022\024\n\014cluster_name\030\007 \001(\t\022S\n\npa" + - "rameters\030\010 \003(\0132?.google.cloud.dataproc.v" + - "1beta2.WorkflowMetadata.ParametersEntry\032" + - "1\n\017ParametersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value" + - "\030\002 \001(\t:\0028\001\"8\n\005State\022\013\n\007UNKNOWN\020\000\022\013\n\007PEND" + - "ING\020\001\022\013\n\007RUNNING\020\002\022\010\n\004DONE\020\003\"E\n\020ClusterO" + - "peration\022\024\n\014operation_id\030\001 \001(\t\022\r\n\005error\030" + - "\002 \001(\t\022\014\n\004done\030\003 \001(\010\"K\n\rWorkflowGraph\022:\n\005" + - "nodes\030\001 \003(\0132+.google.cloud.dataproc.v1be" + - "ta2.WorkflowNode\"\220\002\n\014WorkflowNode\022\017\n\007ste" + - "p_id\030\001 \001(\t\022\035\n\025prerequisite_step_ids\030\002 \003(" + - "\t\022\016\n\006job_id\030\003 \001(\t\022D\n\005state\030\005 \001(\01625.googl" + - "e.cloud.dataproc.v1beta2.WorkflowNode.No" + - "deState\022\r\n\005error\030\006 \001(\t\"k\n\tNodeState\022\033\n\027N" + - "ODE_STATUS_UNSPECIFIED\020\000\022\013\n\007BLOCKED\020\001\022\014\n" + - "\010RUNNABLE\020\002\022\013\n\007RUNNING\020\003\022\r\n\tCOMPLETED\020\004\022" + - "\n\n\006FAILED\020\005\"r\n\035CreateWorkflowTemplateReq" + - "uest\022\016\n\006parent\030\001 \001(\t\022A\n\010template\030\002 \001(\0132/" + - ".google.cloud.dataproc.v1beta2.WorkflowT" + - "emplate\";\n\032GetWorkflowTemplateRequest\022\014\n" + - "\004name\030\001 \001(\t\022\017\n\007version\030\002 \001(\005\"X\n\"Instanti" + - "ateWorkflowTemplateRequest\022\014\n\004name\030\001 \001(\t" + - "\022\017\n\007version\030\002 \001(\005\022\023\n\013instance_id\030\003 \001(\t\"\222" + - "\001\n(InstantiateInlineWorkflowTemplateRequ" + - "est\022\016\n\006parent\030\001 \001(\t\022A\n\010template\030\002 \001(\0132/." 
+ - "google.cloud.dataproc.v1beta2.WorkflowTe" + - "mplate\022\023\n\013instance_id\030\003 \001(\t\"b\n\035UpdateWor" + - "kflowTemplateRequest\022A\n\010template\030\001 \001(\0132/" + - ".google.cloud.dataproc.v1beta2.WorkflowT" + - "emplate\"U\n\034ListWorkflowTemplatesRequest\022" + - "\016\n\006parent\030\001 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npa" + - "ge_token\030\003 \001(\t\"|\n\035ListWorkflowTemplatesR" + - "esponse\022B\n\ttemplates\030\001 \003(\0132/.google.clou" + - "d.dataproc.v1beta2.WorkflowTemplate\022\027\n\017n" + - "ext_page_token\030\002 \001(\t\">\n\035DeleteWorkflowTe" + + "gle.cloud.dataproc.v1beta2.OrderedJob\022D\n" + + "\nparameters\030\t \003(\01320.google.cloud.datapro" + + "c.v1beta2.TemplateParameter\032-\n\013LabelsEnt" + + "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\276\001\n\031W" + + "orkflowTemplatePlacement\022H\n\017managed_clus" + + "ter\030\001 \001(\0132-.google.cloud.dataproc.v1beta" + + "2.ManagedClusterH\000\022J\n\020cluster_selector\030\002" + + " \001(\0132..google.cloud.dataproc.v1beta2.Clu" + + "sterSelectorH\000B\013\n\tplacement\"\336\001\n\016ManagedC" + + "luster\022\024\n\014cluster_name\030\002 \001(\t\022<\n\006config\030\003" + + " \001(\0132,.google.cloud.dataproc.v1beta2.Clu" + + "sterConfig\022I\n\006labels\030\004 \003(\01329.google.clou" + + "d.dataproc.v1beta2.ManagedCluster.Labels" + + "Entry\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005val" + + "ue\030\002 \001(\t:\0028\001\"\260\001\n\017ClusterSelector\022\014\n\004zone" + + "\030\001 \001(\t\022Y\n\016cluster_labels\030\002 \003(\0132A.google." 
+ + "cloud.dataproc.v1beta2.ClusterSelector.C" + + "lusterLabelsEntry\0324\n\022ClusterLabelsEntry\022" + + "\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\373\004\n\nOrde" + + "redJob\022\017\n\007step_id\030\001 \001(\t\022>\n\nhadoop_job\030\002 " + + "\001(\0132(.google.cloud.dataproc.v1beta2.Hado" + + "opJobH\000\022<\n\tspark_job\030\003 \001(\0132\'.google.clou" + + "d.dataproc.v1beta2.SparkJobH\000\022@\n\013pyspark" + + "_job\030\004 \001(\0132).google.cloud.dataproc.v1bet" + + "a2.PySparkJobH\000\022:\n\010hive_job\030\005 \001(\0132&.goog" + + "le.cloud.dataproc.v1beta2.HiveJobH\000\0228\n\007p" + + "ig_job\030\006 \001(\0132%.google.cloud.dataproc.v1b" + + "eta2.PigJobH\000\022C\n\rspark_sql_job\030\007 \001(\0132*.g" + + "oogle.cloud.dataproc.v1beta2.SparkSqlJob" + + "H\000\022E\n\006labels\030\010 \003(\01325.google.cloud.datapr" + + "oc.v1beta2.OrderedJob.LabelsEntry\022@\n\nsch" + + "eduling\030\t \001(\0132,.google.cloud.dataproc.v1" + + "beta2.JobScheduling\022\035\n\025prerequisite_step" + + "_ids\030\n \003(\t\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r" + + "\n\005value\030\002 \001(\t:\0028\001B\n\n\010job_type\"\216\001\n\021Templa" + + "teParameter\022\014\n\004name\030\001 \001(\t\022\016\n\006fields\030\002 \003(" + + "\t\022\023\n\013description\030\003 \001(\t\022F\n\nvalidation\030\004 \001" + + "(\01322.google.cloud.dataproc.v1beta2.Param" + + "eterValidation\"\253\001\n\023ParameterValidation\022?" 
+ + "\n\005regex\030\001 \001(\0132..google.cloud.dataproc.v1" + + "beta2.RegexValidationH\000\022@\n\006values\030\002 \001(\0132" + + "..google.cloud.dataproc.v1beta2.ValueVal" + + "idationH\000B\021\n\017validation_type\"\"\n\017RegexVal" + + "idation\022\017\n\007regexes\030\001 \003(\t\"!\n\017ValueValidat" + + "ion\022\016\n\006values\030\001 \003(\t\"\226\005\n\020WorkflowMetadata" + + "\022\020\n\010template\030\001 \001(\t\022\017\n\007version\030\002 \001(\005\022G\n\016c" + + "reate_cluster\030\003 \001(\0132/.google.cloud.datap" + + "roc.v1beta2.ClusterOperation\022;\n\005graph\030\004 " + + "\001(\0132,.google.cloud.dataproc.v1beta2.Work" + + "flowGraph\022G\n\016delete_cluster\030\005 \001(\0132/.goog" + + "le.cloud.dataproc.v1beta2.ClusterOperati" + + "on\022D\n\005state\030\006 \001(\01625.google.cloud.datapro" + + "c.v1beta2.WorkflowMetadata.State\022\024\n\014clus" + + "ter_name\030\007 \001(\t\022S\n\nparameters\030\010 \003(\0132?.goo" + + "gle.cloud.dataproc.v1beta2.WorkflowMetad" + + "ata.ParametersEntry\022.\n\nstart_time\030\t \001(\0132" + + "\032.google.protobuf.Timestamp\022,\n\010end_time\030" + + "\n \001(\0132\032.google.protobuf.Timestamp\022\024\n\014clu" + + "ster_uuid\030\013 \001(\t\0321\n\017ParametersEntry\022\013\n\003ke" + + "y\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"8\n\005State\022\013\n\007U" + + "NKNOWN\020\000\022\013\n\007PENDING\020\001\022\013\n\007RUNNING\020\002\022\010\n\004DO" + + "NE\020\003\"E\n\020ClusterOperation\022\024\n\014operation_id" + + "\030\001 \001(\t\022\r\n\005error\030\002 \001(\t\022\014\n\004done\030\003 \001(\010\"K\n\rW" + + "orkflowGraph\022:\n\005nodes\030\001 \003(\0132+.google.clo" + + "ud.dataproc.v1beta2.WorkflowNode\"\220\002\n\014Wor" + + "kflowNode\022\017\n\007step_id\030\001 \001(\t\022\035\n\025prerequisi" + + "te_step_ids\030\002 \003(\t\022\016\n\006job_id\030\003 \001(\t\022D\n\005sta" + + "te\030\005 \001(\01625.google.cloud.dataproc.v1beta2" + + 
".WorkflowNode.NodeState\022\r\n\005error\030\006 \001(\t\"k" + + "\n\tNodeState\022\033\n\027NODE_STATUS_UNSPECIFIED\020\000" + + "\022\013\n\007BLOCKED\020\001\022\014\n\010RUNNABLE\020\002\022\013\n\007RUNNING\020\003" + + "\022\r\n\tCOMPLETED\020\004\022\n\n\006FAILED\020\005\"r\n\035CreateWor" + + "kflowTemplateRequest\022\016\n\006parent\030\001 \001(\t\022A\n\010" + + "template\030\002 \001(\0132/.google.cloud.dataproc.v" + + "1beta2.WorkflowTemplate\";\n\032GetWorkflowTe" + "mplateRequest\022\014\n\004name\030\001 \001(\t\022\017\n\007version\030\002" + - " \001(\0052\337\017\n\027WorkflowTemplateService\022\235\002\n\026Cre" + - "ateWorkflowTemplate\022<.google.cloud.datap" + - "roc.v1beta2.CreateWorkflowTemplateReques" + - "t\032/.google.cloud.dataproc.v1beta2.Workfl" + - "owTemplate\"\223\001\202\323\344\223\002\214\001\"8/v1beta2/{parent=p" + - "rojects/*/regions/*}/workflowTemplates:\010" + - "templateZF\":/v1beta2/{parent=projects/*/" + - "locations/*}/workflowTemplates:\010template" + - "\022\201\002\n\023GetWorkflowTemplate\0229.google.cloud." 
+ - "dataproc.v1beta2.GetWorkflowTemplateRequ" + - "est\032/.google.cloud.dataproc.v1beta2.Work" + - "flowTemplate\"~\202\323\344\223\002x\0228/v1beta2/{name=pro" + - "jects/*/regions/*/workflowTemplates/*}Z<" + - "\022:/v1beta2/{name=projects/*/locations/*/" + - "workflowTemplates/*}\022\237\002\n\033InstantiateWork" + - "flowTemplate\022A.google.cloud.dataproc.v1b" + - "eta2.InstantiateWorkflowTemplateRequest\032" + - "\035.google.longrunning.Operation\"\235\001\202\323\344\223\002\226\001" + - "\"D/v1beta2/{name=projects/*/regions/*/wo" + - "rkflowTemplates/*}:instantiate:\001*ZK\"F/v1" + - "beta2/{name=projects/*/locations/*/workf" + - "lowTemplates/*}:instantiate:\001*\022\305\002\n!Insta" + - "ntiateInlineWorkflowTemplate\022G.google.cl" + - "oud.dataproc.v1beta2.InstantiateInlineWo" + - "rkflowTemplateRequest\032\035.google.longrunni" + - "ng.Operation\"\267\001\202\323\344\223\002\260\001\"J/v1beta2/{parent" + - "=projects/*/regions/*}/workflowTemplates" + - ":instantiateInline:\010templateZX\"L/v1beta2" + - "/{parent=projects/*/locations/*}/workflo" + - "wTemplates:instantiateInline:\010template\022\257" + - "\002\n\026UpdateWorkflowTemplate\022<.google.cloud" + - ".dataproc.v1beta2.UpdateWorkflowTemplate" + - "Request\032/.google.cloud.dataproc.v1beta2." 
+ - "WorkflowTemplate\"\245\001\202\323\344\223\002\236\001\032A/v1beta2/{te" + - "mplate.name=projects/*/regions/*/workflo" + - "wTemplates/*}:\010templateZO\032C/v1beta2/{tem" + - "plate.name=projects/*/locations/*/workfl" + - "owTemplates/*}:\010template\022\222\002\n\025ListWorkflo" + - "wTemplates\022;.google.cloud.dataproc.v1bet" + - "a2.ListWorkflowTemplatesRequest\032<.google" + - ".cloud.dataproc.v1beta2.ListWorkflowTemp" + - "latesResponse\"~\202\323\344\223\002x\0228/v1beta2/{parent=" + - "projects/*/regions/*}/workflowTemplatesZ" + - "<\022:/v1beta2/{parent=projects/*/locations" + - "/*}/workflowTemplates\022\356\001\n\026DeleteWorkflow" + - "Template\022<.google.cloud.dataproc.v1beta2" + - ".DeleteWorkflowTemplateRequest\032\026.google." + - "protobuf.Empty\"~\202\323\344\223\002x*8/v1beta2/{name=p" + - "rojects/*/regions/*/workflowTemplates/*}" + - "Z<*:/v1beta2/{name=projects/*/locations/" + - "*/workflowTemplates/*}B\204\001\n!com.google.cl" + - "oud.dataproc.v1beta2B\026WorkflowTemplatesP" + - "rotoP\001ZEgoogle.golang.org/genproto/googl" + - "eapis/cloud/dataproc/v1beta2;dataprocb\006p" + - "roto3" + " \001(\005\"\212\002\n\"InstantiateWorkflowTemplateRequ" + + "est\022\014\n\004name\030\001 \001(\t\022\017\n\007version\030\002 \001(\005\022\027\n\013in" + + "stance_id\030\003 \001(\tB\002\030\001\022\022\n\nrequest_id\030\005 \001(\t\022" + + "e\n\nparameters\030\004 \003(\0132Q.google.cloud.datap" + + "roc.v1beta2.InstantiateWorkflowTemplateR" + + "equest.ParametersEntry\0321\n\017ParametersEntr" + + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\246\001\n(In" + + "stantiateInlineWorkflowTemplateRequest\022\016" + + "\n\006parent\030\001 \001(\t\022A\n\010template\030\002 \001(\0132/.googl" + + "e.cloud.dataproc.v1beta2.WorkflowTemplat" + + "e\022\023\n\013instance_id\030\003 \001(\t\022\022\n\nrequest_id\030\004 \001" + + "(\t\"b\n\035UpdateWorkflowTemplateRequest\022A\n\010t" + + "emplate\030\001 
\001(\0132/.google.cloud.dataproc.v1" + + "beta2.WorkflowTemplate\"U\n\034ListWorkflowTe" + + "mplatesRequest\022\016\n\006parent\030\001 \001(\t\022\021\n\tpage_s" + + "ize\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"|\n\035ListWor" + + "kflowTemplatesResponse\022B\n\ttemplates\030\001 \003(" + + "\0132/.google.cloud.dataproc.v1beta2.Workfl" + + "owTemplate\022\027\n\017next_page_token\030\002 \001(\t\">\n\035D" + + "eleteWorkflowTemplateRequest\022\014\n\004name\030\001 \001" + + "(\t\022\017\n\007version\030\002 \001(\0052\337\017\n\027WorkflowTemplate" + + "Service\022\235\002\n\026CreateWorkflowTemplate\022<.goo" + + "gle.cloud.dataproc.v1beta2.CreateWorkflo" + + "wTemplateRequest\032/.google.cloud.dataproc" + + ".v1beta2.WorkflowTemplate\"\223\001\202\323\344\223\002\214\001\"8/v1" + + "beta2/{parent=projects/*/regions/*}/work" + + "flowTemplates:\010templateZF\":/v1beta2/{par" + + "ent=projects/*/locations/*}/workflowTemp" + + "lates:\010template\022\201\002\n\023GetWorkflowTemplate\022" + + "9.google.cloud.dataproc.v1beta2.GetWorkf" + + "lowTemplateRequest\032/.google.cloud.datapr" + + "oc.v1beta2.WorkflowTemplate\"~\202\323\344\223\002x\0228/v1" + + "beta2/{name=projects/*/regions/*/workflo" + + "wTemplates/*}Z<\022:/v1beta2/{name=projects" + + "/*/locations/*/workflowTemplates/*}\022\237\002\n\033" + + "InstantiateWorkflowTemplate\022A.google.clo" + + "ud.dataproc.v1beta2.InstantiateWorkflowT" + + "emplateRequest\032\035.google.longrunning.Oper" + + "ation\"\235\001\202\323\344\223\002\226\001\"D/v1beta2/{name=projects" + + "/*/regions/*/workflowTemplates/*}:instan" + + "tiate:\001*ZK\"F/v1beta2/{name=projects/*/lo" + + "cations/*/workflowTemplates/*}:instantia" + + "te:\001*\022\305\002\n!InstantiateInlineWorkflowTempl" + + "ate\022G.google.cloud.dataproc.v1beta2.Inst" + + "antiateInlineWorkflowTemplateRequest\032\035.g" + + "oogle.longrunning.Operation\"\267\001\202\323\344\223\002\260\001\"L/" + + "v1beta2/{parent=projects/*/locations/*}/" 
+ + "workflowTemplates:instantiateInline:\010tem" + + "plateZV\"J/v1beta2/{parent=projects/*/reg" + + "ions/*}/workflowTemplates:instantiateInl" + + "ine:\010template\022\257\002\n\026UpdateWorkflowTemplate" + + "\022<.google.cloud.dataproc.v1beta2.UpdateW" + + "orkflowTemplateRequest\032/.google.cloud.da" + + "taproc.v1beta2.WorkflowTemplate\"\245\001\202\323\344\223\002\236" + + "\001\032A/v1beta2/{template.name=projects/*/re" + + "gions/*/workflowTemplates/*}:\010templateZO" + + "\032C/v1beta2/{template.name=projects/*/loc" + + "ations/*/workflowTemplates/*}:\010template\022" + + "\222\002\n\025ListWorkflowTemplates\022;.google.cloud" + + ".dataproc.v1beta2.ListWorkflowTemplatesR" + + "equest\032<.google.cloud.dataproc.v1beta2.L" + + "istWorkflowTemplatesResponse\"~\202\323\344\223\002x\0228/v" + + "1beta2/{parent=projects/*/regions/*}/wor" + + "kflowTemplatesZ<\022:/v1beta2/{parent=proje" + + "cts/*/locations/*}/workflowTemplates\022\356\001\n" + + "\026DeleteWorkflowTemplate\022<.google.cloud.d" + + "ataproc.v1beta2.DeleteWorkflowTemplateRe" + + "quest\032\026.google.protobuf.Empty\"~\202\323\344\223\002x*8/" + + "v1beta2/{name=projects/*/regions/*/workf" + + "lowTemplates/*}Z<*:/v1beta2/{name=projec" + + "ts/*/locations/*/workflowTemplates/*}B\204\001" + + "\n!com.google.cloud.dataproc.v1beta2B\026Wor" + + "kflowTemplatesProtoP\001ZEgoogle.golang.org" + + "/genproto/googleapis/cloud/dataproc/v1be" + + "ta2;dataprocb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -305,7 +349,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_descriptor, - new java.lang.String[] { "Id", "Name", "Version", "CreateTime", "UpdateTime", "Labels", "Placement", "Jobs", }); + new java.lang.String[] { "Id", "Name", "Version", "CreateTime", "UpdateTime", "Labels", "Placement", "Jobs", "Parameters", }); internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_LabelsEntry_descriptor = internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_descriptor.getNestedTypes().get(0); internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_LabelsEntry_fieldAccessorTable = new @@ -354,12 +398,36 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_OrderedJob_LabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); - internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_descriptor = + internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_descriptor = getDescriptor().getMessageTypes().get(5); + internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_dataproc_v1beta2_TemplateParameter_descriptor, + new java.lang.String[] { "Name", "Fields", "Description", "Validation", }); + internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_google_cloud_dataproc_v1beta2_ParameterValidation_descriptor, + new java.lang.String[] { "Regex", "Values", "ValidationType", }); + internal_static_google_cloud_dataproc_v1beta2_RegexValidation_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_google_cloud_dataproc_v1beta2_RegexValidation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_dataproc_v1beta2_RegexValidation_descriptor, + new java.lang.String[] { "Regexes", }); + internal_static_google_cloud_dataproc_v1beta2_ValueValidation_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_google_cloud_dataproc_v1beta2_ValueValidation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_dataproc_v1beta2_ValueValidation_descriptor, + new java.lang.String[] { "Values", }); + internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_descriptor = + getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_descriptor, - new java.lang.String[] { "Template", "Version", "CreateCluster", "Graph", "DeleteCluster", "State", "ClusterName", "Parameters", }); + new java.lang.String[] { "Template", "Version", "CreateCluster", "Graph", "DeleteCluster", "State", "ClusterName", "Parameters", "StartTime", "EndTime", "ClusterUuid", }); internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_ParametersEntry_descriptor = internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_descriptor.getNestedTypes().get(0); internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_ParametersEntry_fieldAccessorTable = new @@ -367,67 +435,73 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( 
internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_ParametersEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1beta2_ClusterOperation_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_dataproc_v1beta2_ClusterOperation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ClusterOperation_descriptor, new java.lang.String[] { "OperationId", "Error", "Done", }); internal_static_google_cloud_dataproc_v1beta2_WorkflowGraph_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_dataproc_v1beta2_WorkflowGraph_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_WorkflowGraph_descriptor, new java.lang.String[] { "Nodes", }); internal_static_google_cloud_dataproc_v1beta2_WorkflowNode_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_dataproc_v1beta2_WorkflowNode_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_WorkflowNode_descriptor, new java.lang.String[] { "StepId", "PrerequisiteStepIds", "JobId", "State", "Error", }); internal_static_google_cloud_dataproc_v1beta2_CreateWorkflowTemplateRequest_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_dataproc_v1beta2_CreateWorkflowTemplateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_CreateWorkflowTemplateRequest_descriptor, new java.lang.String[] { "Parent", "Template", }); 
internal_static_google_cloud_dataproc_v1beta2_GetWorkflowTemplateRequest_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_dataproc_v1beta2_GetWorkflowTemplateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_GetWorkflowTemplateRequest_descriptor, new java.lang.String[] { "Name", "Version", }); internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor, - new java.lang.String[] { "Name", "Version", "InstanceId", }); + new java.lang.String[] { "Name", "Version", "InstanceId", "RequestId", "Parameters", }); + internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor = + internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_descriptor.getNestedTypes().get(0); + internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_dataproc_v1beta2_InstantiateWorkflowTemplateRequest_ParametersEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_dataproc_v1beta2_InstantiateInlineWorkflowTemplateRequest_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_dataproc_v1beta2_InstantiateInlineWorkflowTemplateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_dataproc_v1beta2_InstantiateInlineWorkflowTemplateRequest_descriptor, - new java.lang.String[] { "Parent", "Template", "InstanceId", }); + new java.lang.String[] { "Parent", "Template", "InstanceId", "RequestId", }); internal_static_google_cloud_dataproc_v1beta2_UpdateWorkflowTemplateRequest_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_dataproc_v1beta2_UpdateWorkflowTemplateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_UpdateWorkflowTemplateRequest_descriptor, new java.lang.String[] { "Template", }); internal_static_google_cloud_dataproc_v1beta2_ListWorkflowTemplatesRequest_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(18); internal_static_google_cloud_dataproc_v1beta2_ListWorkflowTemplatesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ListWorkflowTemplatesRequest_descriptor, new java.lang.String[] { "Parent", "PageSize", "PageToken", }); internal_static_google_cloud_dataproc_v1beta2_ListWorkflowTemplatesResponse_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(19); internal_static_google_cloud_dataproc_v1beta2_ListWorkflowTemplatesResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_ListWorkflowTemplatesResponse_descriptor, new java.lang.String[] { "Templates", "NextPageToken", }); internal_static_google_cloud_dataproc_v1beta2_DeleteWorkflowTemplateRequest_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(20); internal_static_google_cloud_dataproc_v1beta2_DeleteWorkflowTemplateRequest_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_dataproc_v1beta2_DeleteWorkflowTemplateRequest_descriptor, diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto index 0fb03e3f751b..5ddc69bb90c1 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; @@ -112,7 +113,7 @@ message Cluster { // generates this value when it creates the cluster. string cluster_uuid = 6; - // Contains cluster daemon metrics such as HDFS and YARN stats. + // Output only. Contains cluster daemon metrics such as HDFS and YARN stats. // // **Beta Feature**: This report is available for testing purposes only. It may // be changed before final release. @@ -163,6 +164,16 @@ message ClusterConfig { // ... worker specific actions ... // fi repeated NodeInitializationAction initialization_actions = 11; + + // Optional. Encryption settings for the cluster. + EncryptionConfig encryption_config = 15; +} + +// Encryption settings for the cluster. +message EncryptionConfig { + // Optional. The Cloud KMS key name to use for PD disk encryption for all + // instances in the cluster. 
+ string gce_pd_kms_key_name = 1; } // Common config settings for resources of Compute Engine cluster @@ -260,8 +271,9 @@ message InstanceGroupConfig { // from `cluster_name`, `num_instances`, and the instance group. repeated string instance_names = 2; - // Output only. The Compute Engine image resource used for cluster - // instances. Inferred from `SoftwareConfig.image_version`. + // Optional. The Compute Engine image resource used for cluster + // instances. It can be specified or may be inferred from + // `SoftwareConfig.image_version`. string image_uri = 3; // Optional. The Compute Engine machine type used for cluster instances. @@ -353,10 +365,14 @@ message DiskConfig { int32 num_local_ssds = 2; } -// Specifies the cluster auto delete related schedule configuration. +// Specifies the cluster auto-delete schedule configuration. message LifecycleConfig { - // Optional. The longest duration that cluster would keep alive while staying - // idle; passing this threshold will cause cluster to be auto-deleted. + // Optional. The duration to keep the cluster alive while idling. + // Passing this threshold will cause the cluster to be + // deleted. Valid range: **[10m, 14d]**. + // + // Example: **"10m"**, the minimum value, to delete the + // cluster when it has had no jobs running for 10 minutes. google.protobuf.Duration idle_delete_ttl = 1; // Optional. Either the exact time the cluster should be deleted at or @@ -365,8 +381,10 @@ message LifecycleConfig { // Optional. The time when cluster will be auto-deleted. google.protobuf.Timestamp auto_delete_time = 2; - // Optional. The life duration of cluster, the cluster will be auto-deleted - // at the end of this duration. + // Optional. The lifetime duration of cluster. The cluster will be + // auto-deleted at the end of this period. Valid range: **[10m, 14d]**. + // + // Example: **"1d"**, to delete the cluster 1 day after its creation.. 
google.protobuf.Duration auto_delete_ttl = 3; } } diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto index d5635583ecc4..7aff5f462ad1 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; @@ -576,6 +577,10 @@ message Job { // be changed before final release. repeated YarnApplication yarn_applications = 9; + // Output only. The email address of the user submitting the job. For jobs + // submitted on the cluster, the address is username@hostname. + string submitted_by = 10; + // Output only. A URI pointing to the location of the stdout of the job's // driver program. string driver_output_resource_uri = 17; @@ -595,6 +600,11 @@ message Job { // Optional. Job scheduling configuration. JobScheduling scheduling = 20; + + // Output only. A UUID that uniquely identifies a job within the project + // over time. This is in contrast to a user-settable reference.job_id that + // may be reused over time. + string job_uuid = 22; } // Job scheduling options. 
diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/operations.proto b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/operations.proto index 8c428dae2190..717410832ff6 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/operations.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/operations.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto index 801708a52c87..8d2f5e62834c 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
+// syntax = "proto3"; diff --git a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto index 4db43168c813..982f874d63b1 100644 --- a/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto +++ b/google-api-grpc/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; @@ -110,10 +111,10 @@ service WorkflowTemplateService { // [Empty][google.protobuf.Empty]. rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { option (google.api.http) = { - post: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" + post: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" body: "template" additional_bindings { - post: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" + post: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" body: "template" } }; @@ -203,6 +204,11 @@ message WorkflowTemplate { // Required. The Directed Acyclic Graph of Jobs to submit. repeated OrderedJob jobs = 8; + + // Optional. Template parameters whose values are substituted into the + // template. Values for parameters must be provided when the template is + // instantiated. 
+ repeated TemplateParameter parameters = 9; } // Specifies workflow execution target. @@ -319,6 +325,104 @@ message OrderedJob { repeated string prerequisite_step_ids = 10; } +// A configurable parameter that replaces one or more fields in the template. +// Parameterizable fields: +// - Labels +// - File uris +// - Job properties +// - Job arguments +// - Script variables +// - Main class (in HadoopJob and SparkJob) +// - Zone (in ClusterSelector) +message TemplateParameter { + // Required. Parameter name. + // The parameter name is used as the key, and paired with the + // parameter value, which are passed to the template when the template + // is instantiated. + // The name must contain only capital letters (A-Z), numbers (0-9), and + // underscores (_), and must not start with a number. The maximum length is + // 40 characters. + string name = 1; + + // Required. Paths to all fields that the parameter replaces. + // A field is allowed to appear in at most one parameter's list of field paths. + // + // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. + // For example, a field path that references the zone field of a workflow + // template's cluster selector would be specified as + // `placement.clusterSelector.zone`. 
+ // + // Also, field paths can reference fields using the following syntax: + // + // * Values in maps can be referenced by key: + // * labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * placement.managedCluster.labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * jobs['step-id'].labels['key'] + // + // * Jobs in the jobs list can be referenced by step-id: + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * jobs['step-id'].hiveJob.queryFileUri + // * jobs['step-id'].pySparkJob.mainPythonFileUri + // * jobs['step-id'].hadoopJob.jarFileUris[0] + // * jobs['step-id'].hadoopJob.archiveUris[0] + // * jobs['step-id'].hadoopJob.fileUris[0] + // * jobs['step-id'].pySparkJob.pythonFileUris[0] + // + // * Items in repeated fields can be referenced by a zero-based index: + // * jobs['step-id'].sparkJob.args[0] + // + // * Other examples: + // * jobs['step-id'].hadoopJob.properties['key'] + // * jobs['step-id'].hadoopJob.args[0] + // * jobs['step-id'].hiveJob.scriptVariables['key'] + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * placement.clusterSelector.zone + // + // It may not be possible to parameterize maps and repeated fields in their + // entirety since only individual map values and individual items in repeated + // fields can be referenced. For example, the following field paths are + // invalid: + // + // - placement.clusterSelector.clusterLabels + // - jobs['step-id'].sparkJob.args + repeated string fields = 2; + + // Optional. Brief description of the parameter. + // Must not exceed 1024 characters. + string description = 3; + + // Optional. Validation rules to be applied to this parameter's value. + ParameterValidation validation = 4; +} + +// Configuration for parameter validation. +message ParameterValidation { + // Required. The type of validation to be performed. + oneof validation_type { + // Validation based on regular expressions. 
+ RegexValidation regex = 1; + + // Validation based on a list of allowed values. + ValueValidation values = 2; + } +} + +// Validation based on regular expressions. +message RegexValidation { + // Required. RE2 regular expressions used to validate the parameter's value. + // The value must match the regex in its entirety (substring + // matches are not sufficient). + repeated string regexes = 1; +} + +// Validation based on a list of allowed values. +message ValueValidation { + // Required. List of allowed values for the parameter. + repeated string values = 1; +} + // A Cloud Dataproc workflow template resource. message WorkflowMetadata { // The operation state. @@ -355,11 +459,20 @@ message WorkflowMetadata { // Output only. The workflow state. State state = 6; - // Output only. The name of the managed cluster. + // Output only. The name of the target cluster. string cluster_name = 7; // Map from parameter names to values that were used for those parameters. map parameters = 8; + + // Output only. Workflow start time. + google.protobuf.Timestamp start_time = 9; + + // Output only. Workflow end time. + google.protobuf.Timestamp end_time = 10; + + // Output only. The UUID of target cluster. + string cluster_uuid = 11; } // The cluster operation triggered by a workflow. @@ -460,6 +573,9 @@ message InstantiateWorkflowTemplateRequest { // workflow template. int32 version = 2; + // Deprecated. Please use `request_id` field instead. + string instance_id = 3 [deprecated = true]; + // Optional. A tag that prevents multiple concurrent workflow // instances with the same tag from running. This mitigates risk of // concurrent instances started due to retries. @@ -469,7 +585,11 @@ message InstantiateWorkflowTemplateRequest { // // The tag must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string instance_id = 3; + string request_id = 5; + + // Optional. 
Map from parameter names to values that should be used for those + // parameters. Values may not exceed 100 characters. + map parameters = 4; } // A request to instantiate an inline workflow template. @@ -482,6 +602,9 @@ message InstantiateInlineWorkflowTemplateRequest { // Required. The workflow template to instantiate. WorkflowTemplate template = 2; + // Deprecated. Please use `request_id` field instead. + string instance_id = 3; + // Optional. A tag that prevents multiple concurrent workflow // instances with the same tag from running. This mitigates risk of // concurrent instances started due to retries. @@ -491,7 +614,7 @@ message InstantiateInlineWorkflowTemplateRequest { // // The tag must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string instance_id = 3; + string request_id = 4; } // A request to update a workflow template. diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java index 6557032e4c05..948d3b2b3307 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java @@ -32,6 +32,7 @@ import com.google.longrunning.Operation; import com.google.longrunning.OperationsClient; import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; @@ -39,8 +40,8 @@ // AUTO-GENERATED DOCUMENTATION AND SERVICE /** - * Service Description: The ClusterControllerService provides methods to manage clusters of Google - * Compute Engine instances. 
+ * Service Description: The ClusterControllerService provides methods to manage clusters of Compute + * Engine instances. * *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: @@ -293,6 +294,65 @@ public final UnaryCallable createClusterCallabl return stub.createClusterCallable(); } + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Updates a cluster in a project. + * + *

Sample code: + * + *


+   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
+   *   String projectId = "";
+   *   String region = "";
+   *   String clusterName = "";
+   *   Cluster cluster = Cluster.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Cluster response = clusterControllerClient.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get();
+   * }
+   * 
+ * + * @param projectId Required. The ID of the Google Cloud Platform project the cluster belongs to. + * @param region Required. The Cloud Dataproc region in which to handle the request. + * @param clusterName Required. The cluster name. + * @param cluster Required. The changes to the cluster. + * @param updateMask Required. Specifies the path, relative to `Cluster`, of the field to update. + * For example, to change the number of workers in a cluster to 5, the `update_mask` parameter + * would be specified as `config.worker_config.num_instances`, and the `PATCH` request body + * would specify the new value, as follows: + *

{ "config":{ "workerConfig":{ "numInstances":"5" } } } Similarly, to change the number + * of preemptible workers in a cluster to 5, the `update_mask` parameter would be + * `config.secondary_worker_config.num_instances`, and the `PATCH` request body would be set + * as follows: + *

{ "config":{ "secondaryWorkerConfig":{ "numInstances":"5" } } } + * <strong>Note:</strong> Currently, only the following fields can be updated: + *

<table> <tbody> <tr> + * <td><strong>Mask</strong></td> + * <td><strong>Purpose</strong></td> </tr> <tr> + * <td><strong><em>labels</em></strong></td> + * <td>Update labels</td> </tr> <tr> + * <td><strong><em>config.worker_config.num_instances</em></strong></td> + * <td>Resize primary worker group</td> </tr> <tr> + * <td><strong><em>config.secondary_worker_config.num_instances</em></strong></td> + * <td>Resize secondary worker group</td> </tr> </tbody> + * </table> + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture updateClusterAsync( + String projectId, String region, String clusterName, Cluster cluster, FieldMask updateMask) { + + UpdateClusterRequest request = + UpdateClusterRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setClusterName(clusterName) + .setCluster(cluster) + .setUpdateMask(updateMask) + .build(); + return updateClusterAsync(request); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Updates a cluster in a project. @@ -620,6 +680,53 @@ public final ListClustersPagedResponse listClusters(String projectId, String reg return listClusters(request); } + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists all regions/{region}/clusters in a project. + * + *

Sample code: + * + *


+   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
+   *   String projectId = "";
+   *   String region = "";
+   *   String filter = "";
+   *   for (Cluster element : clusterControllerClient.listClusters(projectId, region, filter).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ * + * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs + * to. + * @param region Required. The Cloud Dataproc region in which to handle the request. + * @param filter Optional. A filter constraining the clusters to list. Filters are case-sensitive + * and have the following syntax: + *

field = value [AND [field = value]] ... + *

where **field** is one of `status.state`, `clusterName`, or + * `labels.[KEY]`, and `[KEY]` is a label key. **value** can be `*` to + * match all values. `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` contains the + * `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` contains the `DELETING` and + * `ERROR` states. `clusterName` is the name of the cluster provided at creation time. Only + * the logical `AND` operator is supported; space-separated items are treated as having an + * implicit `AND` operator. + *

Example filter: + *

status.state = ACTIVE AND clusterName = mycluster AND labels.env = staging AND + * labels.starred = * + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListClustersPagedResponse listClusters( + String projectId, String region, String filter) { + ListClustersRequest request = + ListClustersRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setFilter(filter) + .build(); + return listClusters(request); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists all regions/{region}/clusters in a project. diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java index bc351f07773c..7393b8f935e1 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java @@ -345,6 +345,46 @@ public final ListJobsPagedResponse listJobs(String projectId, String region) { return listJobs(request); } + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists regions/{region}/jobs in a project. + * + *

Sample code: + * + *


+   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
+   *   String projectId = "";
+   *   String region = "";
+   *   String filter = "";
+   *   for (Job element : jobControllerClient.listJobs(projectId, region, filter).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ * + * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. + * @param region Required. The Cloud Dataproc region in which to handle the request. + * @param filter Optional. A filter constraining the jobs to list. Filters are case-sensitive and + * have the following syntax: + *

[field = value] AND [field [= value]] ... + *

where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a + * label key. **value** can be `*` to match all values. `status.state` can + * be either `ACTIVE` or `NON_ACTIVE`. Only the logical `AND` operator is supported; + * space-separated items are treated as having an implicit `AND` operator. + *

Example filter: + *

status.state = ACTIVE AND labels.env = staging AND labels.starred = * + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListJobsPagedResponse listJobs(String projectId, String region, String filter) { + ListJobsRequest request = + ListJobsRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setFilter(filter) + .build(); + return listJobs(request); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists regions/{region}/jobs in a project. diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java new file mode 100644 index 000000000000..6984af326682 --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java @@ -0,0 +1,1086 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.dataproc.v1; + +import com.google.api.core.ApiFunction; +import com.google.api.core.ApiFuture; +import com.google.api.core.ApiFutures; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.longrunning.OperationFuture; +import com.google.api.gax.paging.AbstractFixedSizeCollection; +import com.google.api.gax.paging.AbstractPage; +import com.google.api.gax.paging.AbstractPagedListResponse; +import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.PageContext; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.dataproc.v1.stub.WorkflowTemplateServiceStub; +import com.google.cloud.dataproc.v1.stub.WorkflowTemplateServiceStubSettings; +import com.google.longrunning.Operation; +import com.google.longrunning.OperationsClient; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND SERVICE +/** + * Service Description: The API interface for managing Workflow Templates in the Cloud Dataproc API. + * + *

This class provides the ability to make remote calls to the backing service through method + * calls that map to API methods. Sample code to get started: + * + *

+ * 
+ * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+ *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+ *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+ *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(formattedParent, template);
+ * }
+ * 
+ * 
+ * + *

Note: close() needs to be called on the workflowTemplateServiceClient object to clean up + * resources such as threads. In the example above, try-with-resources is used, which automatically + * calls close(). + * + *

The surface of this class includes several types of Java methods for each of the API's + * methods: + * + *

    + *
  1. A "flattened" method. With this type of method, the fields of the request type have been + * converted into function parameters. It may be the case that not all fields are available as + * parameters, and not every API method will have a flattened method entry point. + *
  2. A "request object" method. This type of method only takes one parameter, a request object, + * which must be constructed before the call. Not every API method will have a request object + * method. + *
  3. A "callable" method. This type of method takes no parameters and returns an immutable API + * callable object, which can be used to initiate calls to the service. + *
+ * + *

See the individual methods for example code. + * + *

Many parameters require resource names to be formatted in a particular way. To assist with + * these names, this class includes a format method for each type of name, and additionally a parse + * method to extract the individual identifiers contained within names that are returned. + * + *

This class can be customized by passing in a custom instance of + * WorkflowTemplateServiceSettings to create(). For example: + * + *

To customize credentials: + * + *

+ * 
+ * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
+ *     WorkflowTemplateServiceSettings.newBuilder()
+ *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
+ *         .build();
+ * WorkflowTemplateServiceClient workflowTemplateServiceClient =
+ *     WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings);
+ * 
+ * 
+ * + * To customize the endpoint: + * + *
+ * 
+ * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
+ *     WorkflowTemplateServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
+ * WorkflowTemplateServiceClient workflowTemplateServiceClient =
+ *     WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings);
+ * 
+ * 
+ */ +@Generated("by gapic-generator") +@BetaApi +public class WorkflowTemplateServiceClient implements BackgroundResource { + private final WorkflowTemplateServiceSettings settings; + private final WorkflowTemplateServiceStub stub; + private final OperationsClient operationsClient; + + /** Constructs an instance of WorkflowTemplateServiceClient with default settings. */ + public static final WorkflowTemplateServiceClient create() throws IOException { + return create(WorkflowTemplateServiceSettings.newBuilder().build()); + } + + /** + * Constructs an instance of WorkflowTemplateServiceClient, using the given settings. The channels + * are created based on the settings passed in, or defaults for any settings that are not set. + */ + public static final WorkflowTemplateServiceClient create(WorkflowTemplateServiceSettings settings) + throws IOException { + return new WorkflowTemplateServiceClient(settings); + } + + /** + * Constructs an instance of WorkflowTemplateServiceClient, using the given stub for making calls. + * This is for advanced usage - prefer to use WorkflowTemplateServiceSettings}. + */ + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + public static final WorkflowTemplateServiceClient create(WorkflowTemplateServiceStub stub) { + return new WorkflowTemplateServiceClient(stub); + } + + /** + * Constructs an instance of WorkflowTemplateServiceClient, using the given settings. This is + * protected so that it is easy to make a subclass, but otherwise, the static factory methods + * should be preferred. 
+ */ + protected WorkflowTemplateServiceClient(WorkflowTemplateServiceSettings settings) + throws IOException { + this.settings = settings; + this.stub = ((WorkflowTemplateServiceStubSettings) settings.getStubSettings()).createStub(); + this.operationsClient = OperationsClient.create(this.stub.getOperationsStub()); + } + + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + protected WorkflowTemplateServiceClient(WorkflowTemplateServiceStub stub) { + this.settings = null; + this.stub = stub; + this.operationsClient = OperationsClient.create(this.stub.getOperationsStub()); + } + + public final WorkflowTemplateServiceSettings getSettings() { + return settings; + } + + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + public WorkflowTemplateServiceStub getStub() { + return stub; + } + + /** + * Returns the OperationsClient that can be used to query the status of a long-running operation + * returned by another API method call. + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationsClient getOperationsClient() { + return operationsClient; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Creates new workflow template. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(formattedParent, template);
+   * }
+   * 
+ * + * @param parent Required. The "resource name" of the region, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}` + * @param template Required. The Dataproc workflow template to create. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate createWorkflowTemplate(String parent, WorkflowTemplate template) { + + CreateWorkflowTemplateRequest request = + CreateWorkflowTemplateRequest.newBuilder().setParent(parent).setTemplate(template).build(); + return createWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Creates new workflow template. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(request);
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateRequest request) { + return createWorkflowTemplateCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Creates new workflow template. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.createWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   WorkflowTemplate response = future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + createWorkflowTemplateCallable() { + return stub.createWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Retrieves the latest workflow template. + * + *

Can retrieve previously instantiated template by specifying optional version parameter. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(formattedName);
+   * }
+   * 
+ * + * @param name Required. The "resource name" of the workflow template, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate getWorkflowTemplate(String name) { + + GetWorkflowTemplateRequest request = + GetWorkflowTemplateRequest.newBuilder().setName(name).build(); + return getWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Retrieves the latest workflow template. + * + *

Can retrieve previously instantiated template by specifying optional version parameter. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(request);
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest request) { + return getWorkflowTemplateCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Retrieves the latest workflow template. + * + *

Can retrieve previously instantiated template by specifying optional version parameter. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.getWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   WorkflowTemplate response = future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + getWorkflowTemplateCallable() { + return stub.getWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(formattedName).get();
+   * }
+   * 
+ * + * @param name Required. The "resource name" of the workflow template, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateWorkflowTemplateAsync( + String name) { + + InstantiateWorkflowTemplateRequest request = + InstantiateWorkflowTemplateRequest.newBuilder().setName(name).build(); + return instantiateWorkflowTemplateAsync(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   Map<String, String> parameters = new HashMap<>();
+   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(formattedName, parameters).get();
+   * }
+   * 
+ * + * @param name Required. The "resource name" of the workflow template, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * @param parameters Optional. Map from parameter names to values that should be used for those + * parameters. Values may not exceed 100 characters. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateWorkflowTemplateAsync( + String name, Map parameters) { + + InstantiateWorkflowTemplateRequest request = + InstantiateWorkflowTemplateRequest.newBuilder() + .setName(name) + .putAllParameters(parameters) + .build(); + return instantiateWorkflowTemplateAsync(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(request).get();
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateWorkflowTemplateAsync( + InstantiateWorkflowTemplateRequest request) { + return instantiateWorkflowTemplateOperationCallable().futureCall(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   OperationFuture<Operation> future = workflowTemplateServiceClient.instantiateWorkflowTemplateOperationCallable().futureCall(request);
+   *   // Do something
+   *   Empty response = future.get();
+   * }
+   * 
+ */ + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public final OperationCallable + instantiateWorkflowTemplateOperationCallable() { + return stub.instantiateWorkflowTemplateOperationCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + instantiateWorkflowTemplateCallable() { + return stub.instantiateWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   Empty response = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(formattedParent, template).get();
+   * }
+   * 
+ * + * @param parent Required. The "resource name" of the workflow template region, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}` + * @param template Required. The workflow template to instantiate. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateInlineWorkflowTemplateAsync( + String parent, WorkflowTemplate template) { + + InstantiateInlineWorkflowTemplateRequest request = + InstantiateInlineWorkflowTemplateRequest.newBuilder() + .setParent(parent) + .setTemplate(template) + .build(); + return instantiateInlineWorkflowTemplateAsync(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   Empty response = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(request).get();
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateInlineWorkflowTemplateAsync( + InstantiateInlineWorkflowTemplateRequest request) { + return instantiateInlineWorkflowTemplateOperationCallable().futureCall(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   OperationFuture<Operation> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateOperationCallable().futureCall(request);
+   *   // Do something
+   *   Empty response = future.get();
+   * }
+   * 
+ */ + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public final OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + return stub.instantiateInlineWorkflowTemplateOperationCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + instantiateInlineWorkflowTemplateCallable() { + return stub.instantiateInlineWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Updates (replaces) workflow template. The updated template must contain version that matches + * the current server version. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   WorkflowTemplate response = workflowTemplateServiceClient.updateWorkflowTemplate(template);
+   * }
+   * 
+ * + * @param template Required. The updated workflow template. + *

The `template.version` field must match the current version. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate updateWorkflowTemplate(WorkflowTemplate template) { + + UpdateWorkflowTemplateRequest request = + UpdateWorkflowTemplateRequest.newBuilder().setTemplate(template).build(); + return updateWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Updates (replaces) workflow template. The updated template must contain version that matches + * the current server version. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   UpdateWorkflowTemplateRequest request = UpdateWorkflowTemplateRequest.newBuilder()
+   *     .setTemplate(template)
+   *     .build();
+   *   WorkflowTemplate response = workflowTemplateServiceClient.updateWorkflowTemplate(request);
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate updateWorkflowTemplate(UpdateWorkflowTemplateRequest request) { + return updateWorkflowTemplateCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Updates (replaces) workflow template. The updated template must contain version that matches + * the current server version. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   UpdateWorkflowTemplateRequest request = UpdateWorkflowTemplateRequest.newBuilder()
+   *     .setTemplate(template)
+   *     .build();
+   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.updateWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   WorkflowTemplate response = future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + updateWorkflowTemplateCallable() { + return stub.updateWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists workflows that match the specified filter in the request. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(formattedParent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ * + * @param parent Required. The "resource name" of the region, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(String parent) { + ListWorkflowTemplatesRequest request = + ListWorkflowTemplatesRequest.newBuilder().setParent(parent).build(); + return listWorkflowTemplates(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists workflows that match the specified filter in the request. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .build();
+   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates( + ListWorkflowTemplatesRequest request) { + return listWorkflowTemplatesPagedCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists workflows that match the specified filter in the request. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .build();
+   *   ApiFuture<ListWorkflowTemplatesPagedResponse> future = workflowTemplateServiceClient.listWorkflowTemplatesPagedCallable().futureCall(request);
+   *   // Do something
+   *   for (WorkflowTemplate element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ */ + public final UnaryCallable + listWorkflowTemplatesPagedCallable() { + return stub.listWorkflowTemplatesPagedCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists workflows that match the specified filter in the request. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .build();
+   *   while (true) {
+   *     ListWorkflowTemplatesResponse response = workflowTemplateServiceClient.listWorkflowTemplatesCallable().call(request);
+   *     for (WorkflowTemplate element : response.getTemplatesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * 
+ */ + public final UnaryCallable + listWorkflowTemplatesCallable() { + return stub.listWorkflowTemplatesCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Deletes a workflow template. It does not cancel in-progress workflows. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   workflowTemplateServiceClient.deleteWorkflowTemplate(formattedName);
+   * }
+   * 
+ * + * @param name Required. The "resource name" of the workflow template, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteWorkflowTemplate(String name) { + + DeleteWorkflowTemplateRequest request = + DeleteWorkflowTemplateRequest.newBuilder().setName(name).build(); + deleteWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Deletes a workflow template. It does not cancel in-progress workflows. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   workflowTemplateServiceClient.deleteWorkflowTemplate(request);
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteWorkflowTemplate(DeleteWorkflowTemplateRequest request) { + deleteWorkflowTemplateCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Deletes a workflow template. It does not cancel in-progress workflows. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
+   *     .setName(formattedName)
+   *     .build();
+   *   ApiFuture<Void> future = workflowTemplateServiceClient.deleteWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + deleteWorkflowTemplateCallable() { + return stub.deleteWorkflowTemplateCallable(); + } + + @Override + public final void close() { + stub.close(); + } + + @Override + public void shutdown() { + stub.shutdown(); + } + + @Override + public boolean isShutdown() { + return stub.isShutdown(); + } + + @Override + public boolean isTerminated() { + return stub.isTerminated(); + } + + @Override + public void shutdownNow() { + stub.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return stub.awaitTermination(duration, unit); + } + + public static class ListWorkflowTemplatesPagedResponse + extends AbstractPagedListResponse< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate, + ListWorkflowTemplatesPage, ListWorkflowTemplatesFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext + context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListWorkflowTemplatesPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, + new ApiFunction() { + @Override + public ListWorkflowTemplatesPagedResponse apply(ListWorkflowTemplatesPage input) { + return new ListWorkflowTemplatesPagedResponse(input); + } + }); + } + + private ListWorkflowTemplatesPagedResponse(ListWorkflowTemplatesPage page) { + super(page, ListWorkflowTemplatesFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListWorkflowTemplatesPage + extends AbstractPage< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate, + ListWorkflowTemplatesPage> { + + private ListWorkflowTemplatesPage( + PageContext + context, + ListWorkflowTemplatesResponse response) { + super(context, response); + } + + private static ListWorkflowTemplatesPage createEmptyPage() { + return new ListWorkflowTemplatesPage(null, null); + } + + @Override + protected 
ListWorkflowTemplatesPage createPage( + PageContext + context, + ListWorkflowTemplatesResponse response) { + return new ListWorkflowTemplatesPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext + context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListWorkflowTemplatesFixedSizeCollection + extends AbstractFixedSizeCollection< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate, + ListWorkflowTemplatesPage, ListWorkflowTemplatesFixedSizeCollection> { + + private ListWorkflowTemplatesFixedSizeCollection( + List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListWorkflowTemplatesFixedSizeCollection createEmptyCollection() { + return new ListWorkflowTemplatesFixedSizeCollection(null, 0); + } + + @Override + protected ListWorkflowTemplatesFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListWorkflowTemplatesFixedSizeCollection(pages, collectionSize); + } + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java new file mode 100644 index 000000000000..c96fbe9e731b --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java @@ -0,0 +1,301 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.dataproc.v1; + +import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; + +import com.google.api.core.ApiFunction; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.GoogleCredentialsProvider; +import com.google.api.gax.core.InstantiatingExecutorProvider; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.ClientSettings; +import com.google.api.gax.rpc.OperationCallSettings; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.cloud.dataproc.v1.stub.WorkflowTemplateServiceStubSettings; +import com.google.longrunning.Operation; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS +/** + * Settings class to configure an instance of {@link WorkflowTemplateServiceClient}. + * + *

The default instance has everything set to sensible defaults: + * + *

    + *
  • The default service address (dataproc.googleapis.com) and default port (443) are used. + *
  • Credentials are acquired automatically through Application Default Credentials. + *
  • Retries are configured for idempotent methods but not for non-idempotent methods. + *
+ * + *

The builder of this class is recursive, so contained classes are themselves builders. When + * build() is called, the tree of builders is called to create the complete settings object. For + * example, to set the total timeout of createWorkflowTemplate to 30 seconds: + * + *

+ * 
+ * WorkflowTemplateServiceSettings.Builder workflowTemplateServiceSettingsBuilder =
+ *     WorkflowTemplateServiceSettings.newBuilder();
+ * workflowTemplateServiceSettingsBuilder.createWorkflowTemplateSettings().getRetrySettings().toBuilder()
+ *     .setTotalTimeout(Duration.ofSeconds(30));
+ * WorkflowTemplateServiceSettings workflowTemplateServiceSettings = workflowTemplateServiceSettingsBuilder.build();
+ * 
+ * 
+ */ +@Generated("by gapic-generator") +@BetaApi +public class WorkflowTemplateServiceSettings + extends ClientSettings { + /** Returns the object with the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings + createWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .createWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings + getWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()).getWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ + public UnaryCallSettings + instantiateWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .instantiateWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public OperationCallSettings + instantiateWorkflowTemplateOperationSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .instantiateWorkflowTemplateOperationSettings(); + } + + /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ + public UnaryCallSettings + instantiateInlineWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .instantiateInlineWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. 
*/ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public OperationCallSettings + instantiateInlineWorkflowTemplateOperationSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .instantiateInlineWorkflowTemplateOperationSettings(); + } + + /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ + public UnaryCallSettings + updateWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .updateWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to listWorkflowTemplates. */ + public PagedCallSettings< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + listWorkflowTemplatesSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .listWorkflowTemplatesSettings(); + } + + /** Returns the object with the settings used for calls to deleteWorkflowTemplate. */ + public UnaryCallSettings deleteWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .deleteWorkflowTemplateSettings(); + } + + public static final WorkflowTemplateServiceSettings create( + WorkflowTemplateServiceStubSettings stub) throws IOException { + return new WorkflowTemplateServiceSettings.Builder(stub.toBuilder()).build(); + } + + /** Returns a builder for the default ExecutorProvider for this service. */ + public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { + return WorkflowTemplateServiceStubSettings.defaultExecutorProviderBuilder(); + } + + /** Returns the default service endpoint. */ + public static String getDefaultEndpoint() { + return WorkflowTemplateServiceStubSettings.getDefaultEndpoint(); + } + + /** Returns the default service scopes. 
*/ + public static List getDefaultServiceScopes() { + return WorkflowTemplateServiceStubSettings.getDefaultServiceScopes(); + } + + /** Returns a builder for the default credentials for this service. */ + public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { + return WorkflowTemplateServiceStubSettings.defaultCredentialsProviderBuilder(); + } + + /** Returns a builder for the default ChannelProvider for this service. */ + public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { + return WorkflowTemplateServiceStubSettings.defaultGrpcTransportProviderBuilder(); + } + + public static TransportChannelProvider defaultTransportChannelProvider() { + return WorkflowTemplateServiceStubSettings.defaultTransportChannelProvider(); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { + return WorkflowTemplateServiceStubSettings.defaultApiClientHeaderProviderBuilder(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder() { + return Builder.createDefault(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder(ClientContext clientContext) { + return new Builder(clientContext); + } + + /** Returns a builder containing all the values of this settings class. */ + public Builder toBuilder() { + return new Builder(this); + } + + protected WorkflowTemplateServiceSettings(Builder settingsBuilder) throws IOException { + super(settingsBuilder); + } + + /** Builder for WorkflowTemplateServiceSettings. 
*/ + public static class Builder + extends ClientSettings.Builder { + protected Builder() throws IOException { + this((ClientContext) null); + } + + protected Builder(ClientContext clientContext) { + super(WorkflowTemplateServiceStubSettings.newBuilder(clientContext)); + } + + private static Builder createDefault() { + return new Builder(WorkflowTemplateServiceStubSettings.newBuilder()); + } + + protected Builder(WorkflowTemplateServiceSettings settings) { + super(settings.getStubSettings().toBuilder()); + } + + protected Builder(WorkflowTemplateServiceStubSettings.Builder stubSettings) { + super(stubSettings); + } + + public WorkflowTemplateServiceStubSettings.Builder getStubSettingsBuilder() { + return ((WorkflowTemplateServiceStubSettings.Builder) getStubSettings()); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' + /** + * Applies the given settings updater function to all of the unary API methods in this service. + * + *

Note: This method does not support applying settings to streaming methods. + */ + public Builder applyToAllUnaryMethods( + ApiFunction, Void> settingsUpdater) throws Exception { + super.applyToAllUnaryMethods( + getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); + return this; + } + + /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings.Builder + createWorkflowTemplateSettings() { + return getStubSettingsBuilder().createWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings.Builder + getWorkflowTemplateSettings() { + return getStubSettingsBuilder().getWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ + public UnaryCallSettings.Builder + instantiateWorkflowTemplateSettings() { + return getStubSettingsBuilder().instantiateWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public OperationCallSettings.Builder< + InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateWorkflowTemplateOperationSettings() { + return getStubSettingsBuilder().instantiateWorkflowTemplateOperationSettings(); + } + + /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */ + public UnaryCallSettings.Builder + instantiateInlineWorkflowTemplateSettings() { + return getStubSettingsBuilder().instantiateInlineWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. 
*/ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public OperationCallSettings.Builder< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings() { + return getStubSettingsBuilder().instantiateInlineWorkflowTemplateOperationSettings(); + } + + /** Returns the builder for the settings used for calls to updateWorkflowTemplate. */ + public UnaryCallSettings.Builder + updateWorkflowTemplateSettings() { + return getStubSettingsBuilder().updateWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to listWorkflowTemplates. */ + public PagedCallSettings.Builder< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + listWorkflowTemplatesSettings() { + return getStubSettingsBuilder().listWorkflowTemplatesSettings(); + } + + /** Returns the builder for the settings used for calls to deleteWorkflowTemplate. */ + public UnaryCallSettings.Builder + deleteWorkflowTemplateSettings() { + return getStubSettingsBuilder().deleteWorkflowTemplateSettings(); + } + + @Override + public WorkflowTemplateServiceSettings build() throws IOException { + return new WorkflowTemplateServiceSettings(this); + } + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java index a434d4212d05..17f90816b57d 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java @@ -22,7 +22,7 @@ *

======================= ClusterControllerClient ======================= * *

Service Description: The ClusterControllerService provides methods to manage clusters of - * Google Compute Engine instances. + * Compute Engine instances. * *

Sample for ClusterControllerClient: * @@ -53,5 +53,22 @@ * } * *

+ * + * ============================= WorkflowTemplateServiceClient ============================= + * + *

Service Description: The API interface for managing Workflow Templates in the Cloud Dataproc + * API. + * + *

Sample for WorkflowTemplateServiceClient: + * + *

+ * 
+ * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+ *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+ *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+ *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(formattedParent, template);
+ * }
+ * 
+ * 
*/ package com.google.cloud.dataproc.v1; diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java index ee4eca3d3a5b..5c427eafa258 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java @@ -342,8 +342,12 @@ public static class Builder extends StubSettings.BuildernewArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.newArrayList())); + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.INTERNAL, + StatusCode.Code.UNAVAILABLE))); + definitions.put( + "non_idempotent", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -428,7 +432,7 @@ private static Builder initDefaults(Builder builder) { builder .deleteClusterSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); builder @@ -466,7 +470,7 @@ private static Builder initDefaults(Builder builder) { .setInitialRpcTimeout(Duration.ZERO) // ignored .setRpcTimeoutMultiplier(1.0) // ignored .setMaxRpcTimeout(Duration.ZERO) // ignored - .setTotalTimeout(Duration.ofMillis(300000L)) + .setTotalTimeout(Duration.ofMillis(900000L)) .build())); builder .updateClusterOperationSettings() @@ -489,7 +493,7 @@ private static Builder initDefaults(Builder builder) { .setInitialRpcTimeout(Duration.ZERO) // ignored .setRpcTimeoutMultiplier(1.0) // ignored .setMaxRpcTimeout(Duration.ZERO) // ignored - 
.setTotalTimeout(Duration.ofMillis(300000L)) + .setTotalTimeout(Duration.ofMillis(900000L)) .build())); builder .deleteClusterOperationSettings() @@ -512,7 +516,7 @@ private static Builder initDefaults(Builder builder) { .setInitialRpcTimeout(Duration.ZERO) // ignored .setRpcTimeoutMultiplier(1.0) // ignored .setMaxRpcTimeout(Duration.ZERO) // ignored - .setTotalTimeout(Duration.ofMillis(300000L)) + .setTotalTimeout(Duration.ofMillis(900000L)) .build())); builder .diagnoseClusterOperationSettings() diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java new file mode 100644 index 000000000000..a9b43d8820c8 --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java @@ -0,0 +1,116 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.dataproc.v1.stub; + +import com.google.api.core.BetaApi; +import com.google.api.gax.grpc.GrpcCallSettings; +import com.google.api.gax.grpc.GrpcCallableFactory; +import com.google.api.gax.grpc.GrpcStubCallableFactory; +import com.google.api.gax.rpc.BatchingCallSettings; +import com.google.api.gax.rpc.BidiStreamingCallable; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.ClientStreamingCallable; +import com.google.api.gax.rpc.OperationCallSettings; +import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallable; +import com.google.api.gax.rpc.StreamingCallSettings; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; +import com.google.longrunning.stub.OperationsStub; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS +/** + * gRPC callable factory implementation for Google Cloud Dataproc API. + * + *

This class is for advanced usage. + */ +@Generated("by gapic-generator") +@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") +public class GrpcWorkflowTemplateServiceCallableFactory implements GrpcStubCallableFactory { + @Override + public UnaryCallable createUnaryCallable( + GrpcCallSettings grpcCallSettings, + UnaryCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createUnaryCallable(grpcCallSettings, callSettings, clientContext); + } + + @Override + public + UnaryCallable createPagedCallable( + GrpcCallSettings grpcCallSettings, + PagedCallSettings pagedCallSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createPagedCallable( + grpcCallSettings, pagedCallSettings, clientContext); + } + + @Override + public UnaryCallable createBatchingCallable( + GrpcCallSettings grpcCallSettings, + BatchingCallSettings batchingCallSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createBatchingCallable( + grpcCallSettings, batchingCallSettings, clientContext); + } + + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + @Override + public + OperationCallable createOperationCallable( + GrpcCallSettings grpcCallSettings, + OperationCallSettings operationCallSettings, + ClientContext clientContext, + OperationsStub operationsStub) { + return GrpcCallableFactory.createOperationCallable( + grpcCallSettings, operationCallSettings, clientContext, operationsStub); + } + + @Override + public + BidiStreamingCallable createBidiStreamingCallable( + GrpcCallSettings grpcCallSettings, + StreamingCallSettings streamingCallSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createBidiStreamingCallable( + grpcCallSettings, streamingCallSettings, clientContext); + } + + @Override + public + ServerStreamingCallable createServerStreamingCallable( + GrpcCallSettings grpcCallSettings, + 
ServerStreamingCallSettings streamingCallSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createServerStreamingCallable( + grpcCallSettings, streamingCallSettings, clientContext); + } + + @Override + public + ClientStreamingCallable createClientStreamingCallable( + GrpcCallSettings grpcCallSettings, + StreamingCallSettings streamingCallSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createClientStreamingCallable( + grpcCallSettings, streamingCallSettings, clientContext); + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java new file mode 100644 index 000000000000..7fc18d3c1794 --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java @@ -0,0 +1,372 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.dataproc.v1.stub; + +import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; + +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.core.BackgroundResourceAggregation; +import com.google.api.gax.grpc.GrpcCallSettings; +import com.google.api.gax.grpc.GrpcStubCallableFactory; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest; +import com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse; +import com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.WorkflowMetadata; +import com.google.cloud.dataproc.v1.WorkflowTemplate; +import com.google.longrunning.Operation; +import com.google.longrunning.stub.GrpcOperationsStub; +import com.google.protobuf.Empty; +import io.grpc.MethodDescriptor; +import io.grpc.protobuf.ProtoUtils; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS +/** + * gRPC stub implementation for Google Cloud Dataproc API. + * + *

This class is for advanced usage and reflects the underlying API directly. + */ +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub { + + private static final MethodDescriptor + createWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(CreateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); + private static final MethodDescriptor + getWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(GetWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); + private static final MethodDescriptor + instantiateWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(InstantiateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) + .build(); + private static final MethodDescriptor + instantiateInlineWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate") + .setRequestMarshaller( + 
ProtoUtils.marshaller( + InstantiateInlineWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) + .build(); + private static final MethodDescriptor + updateWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(UpdateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); + private static final MethodDescriptor + listWorkflowTemplatesMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates") + .setRequestMarshaller( + ProtoUtils.marshaller(ListWorkflowTemplatesRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(ListWorkflowTemplatesResponse.getDefaultInstance())) + .build(); + private static final MethodDescriptor + deleteWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(DeleteWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) + .build(); + + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; + + private final UnaryCallable + createWorkflowTemplateCallable; + private final UnaryCallable + getWorkflowTemplateCallable; + private final UnaryCallable + instantiateWorkflowTemplateCallable; + private final OperationCallable + instantiateWorkflowTemplateOperationCallable; + private final UnaryCallable + 
instantiateInlineWorkflowTemplateCallable; + private final OperationCallable + instantiateInlineWorkflowTemplateOperationCallable; + private final UnaryCallable + updateWorkflowTemplateCallable; + private final UnaryCallable + listWorkflowTemplatesCallable; + private final UnaryCallable + listWorkflowTemplatesPagedCallable; + private final UnaryCallable deleteWorkflowTemplateCallable; + + private final GrpcStubCallableFactory callableFactory; + + public static final GrpcWorkflowTemplateServiceStub create( + WorkflowTemplateServiceStubSettings settings) throws IOException { + return new GrpcWorkflowTemplateServiceStub(settings, ClientContext.create(settings)); + } + + public static final GrpcWorkflowTemplateServiceStub create(ClientContext clientContext) + throws IOException { + return new GrpcWorkflowTemplateServiceStub( + WorkflowTemplateServiceStubSettings.newBuilder().build(), clientContext); + } + + public static final GrpcWorkflowTemplateServiceStub create( + ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { + return new GrpcWorkflowTemplateServiceStub( + WorkflowTemplateServiceStubSettings.newBuilder().build(), clientContext, callableFactory); + } + + /** + * Constructs an instance of GrpcWorkflowTemplateServiceStub, using the given settings. This is + * protected so that it is easy to make a subclass, but otherwise, the static factory methods + * should be preferred. + */ + protected GrpcWorkflowTemplateServiceStub( + WorkflowTemplateServiceStubSettings settings, ClientContext clientContext) + throws IOException { + this(settings, clientContext, new GrpcWorkflowTemplateServiceCallableFactory()); + } + + /** + * Constructs an instance of GrpcWorkflowTemplateServiceStub, using the given settings. This is + * protected so that it is easy to make a subclass, but otherwise, the static factory methods + * should be preferred. 
+ */ + protected GrpcWorkflowTemplateServiceStub( + WorkflowTemplateServiceStubSettings settings, + ClientContext clientContext, + GrpcStubCallableFactory callableFactory) + throws IOException { + this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); + + GrpcCallSettings + createWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createWorkflowTemplateMethodDescriptor) + .build(); + GrpcCallSettings + getWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getWorkflowTemplateMethodDescriptor) + .build(); + GrpcCallSettings + instantiateWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(instantiateWorkflowTemplateMethodDescriptor) + .build(); + GrpcCallSettings + instantiateInlineWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(instantiateInlineWorkflowTemplateMethodDescriptor) + .build(); + GrpcCallSettings + updateWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(updateWorkflowTemplateMethodDescriptor) + .build(); + GrpcCallSettings + listWorkflowTemplatesTransportSettings = + GrpcCallSettings + .newBuilder() + .setMethodDescriptor(listWorkflowTemplatesMethodDescriptor) + .build(); + GrpcCallSettings deleteWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(deleteWorkflowTemplateMethodDescriptor) + .build(); + + this.createWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + createWorkflowTemplateTransportSettings, + settings.createWorkflowTemplateSettings(), + clientContext); + this.getWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + getWorkflowTemplateTransportSettings, + settings.getWorkflowTemplateSettings(), + clientContext); + this.instantiateWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + 
instantiateWorkflowTemplateTransportSettings, + settings.instantiateWorkflowTemplateSettings(), + clientContext); + this.instantiateWorkflowTemplateOperationCallable = + callableFactory.createOperationCallable( + instantiateWorkflowTemplateTransportSettings, + settings.instantiateWorkflowTemplateOperationSettings(), + clientContext, + this.operationsStub); + this.instantiateInlineWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + instantiateInlineWorkflowTemplateTransportSettings, + settings.instantiateInlineWorkflowTemplateSettings(), + clientContext); + this.instantiateInlineWorkflowTemplateOperationCallable = + callableFactory.createOperationCallable( + instantiateInlineWorkflowTemplateTransportSettings, + settings.instantiateInlineWorkflowTemplateOperationSettings(), + clientContext, + this.operationsStub); + this.updateWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + updateWorkflowTemplateTransportSettings, + settings.updateWorkflowTemplateSettings(), + clientContext); + this.listWorkflowTemplatesCallable = + callableFactory.createUnaryCallable( + listWorkflowTemplatesTransportSettings, + settings.listWorkflowTemplatesSettings(), + clientContext); + this.listWorkflowTemplatesPagedCallable = + callableFactory.createPagedCallable( + listWorkflowTemplatesTransportSettings, + settings.listWorkflowTemplatesSettings(), + clientContext); + this.deleteWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + deleteWorkflowTemplateTransportSettings, + settings.deleteWorkflowTemplateSettings(), + clientContext); + + backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public GrpcOperationsStub getOperationsStub() { + return operationsStub; + } + + public UnaryCallable + createWorkflowTemplateCallable() { + return createWorkflowTemplateCallable; + } + + public UnaryCallable 
getWorkflowTemplateCallable() { + return getWorkflowTemplateCallable; + } + + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallable + instantiateWorkflowTemplateOperationCallable() { + return instantiateWorkflowTemplateOperationCallable; + } + + public UnaryCallable + instantiateWorkflowTemplateCallable() { + return instantiateWorkflowTemplateCallable; + } + + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + return instantiateInlineWorkflowTemplateOperationCallable; + } + + public UnaryCallable + instantiateInlineWorkflowTemplateCallable() { + return instantiateInlineWorkflowTemplateCallable; + } + + public UnaryCallable + updateWorkflowTemplateCallable() { + return updateWorkflowTemplateCallable; + } + + public UnaryCallable + listWorkflowTemplatesPagedCallable() { + return listWorkflowTemplatesPagedCallable; + } + + public UnaryCallable + listWorkflowTemplatesCallable() { + return listWorkflowTemplatesCallable; + } + + public UnaryCallable deleteWorkflowTemplateCallable() { + return deleteWorkflowTemplateCallable; + } + + @Override + public final void close() { + shutdown(); + } + + @Override + public void shutdown() { + backgroundResources.shutdown(); + } + + @Override + public boolean isShutdown() { + return backgroundResources.isShutdown(); + } + + @Override + public boolean isTerminated() { + return backgroundResources.isTerminated(); + } + + @Override + public void shutdownNow() { + backgroundResources.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return backgroundResources.awaitTermination(duration, unit); + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java 
b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java index 8e86809108cf..f52f26746dd4 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java @@ -280,8 +280,12 @@ public static class Builder extends StubSettings.BuildernewArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.newArrayList())); + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.INTERNAL, + StatusCode.Code.UNAVAILABLE))); + definitions.put( + "non_idempotent", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -298,7 +302,7 @@ public static class Builder extends StubSettings.BuilderThis class is for advanced usage and reflects the underlying API directly. 
+ */ +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +public abstract class WorkflowTemplateServiceStub implements BackgroundResource { + + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationsStub getOperationsStub() { + throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); + } + + public UnaryCallable + createWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: createWorkflowTemplateCallable()"); + } + + public UnaryCallable getWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: getWorkflowTemplateCallable()"); + } + + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallable + instantiateWorkflowTemplateOperationCallable() { + throw new UnsupportedOperationException( + "Not implemented: instantiateWorkflowTemplateOperationCallable()"); + } + + public UnaryCallable + instantiateWorkflowTemplateCallable() { + throw new UnsupportedOperationException( + "Not implemented: instantiateWorkflowTemplateCallable()"); + } + + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + throw new UnsupportedOperationException( + "Not implemented: instantiateInlineWorkflowTemplateOperationCallable()"); + } + + public UnaryCallable + instantiateInlineWorkflowTemplateCallable() { + throw new UnsupportedOperationException( + "Not implemented: instantiateInlineWorkflowTemplateCallable()"); + } + + public UnaryCallable + updateWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: updateWorkflowTemplateCallable()"); + } + + public UnaryCallable + listWorkflowTemplatesPagedCallable() { + throw new 
UnsupportedOperationException( + "Not implemented: listWorkflowTemplatesPagedCallable()"); + } + + public UnaryCallable + listWorkflowTemplatesCallable() { + throw new UnsupportedOperationException("Not implemented: listWorkflowTemplatesCallable()"); + } + + public UnaryCallable deleteWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: deleteWorkflowTemplateCallable()"); + } + + @Override + public abstract void close(); +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java new file mode 100644 index 000000000000..18b8b58d12e7 --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java @@ -0,0 +1,639 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.dataproc.v1.stub; + +import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; + +import com.google.api.core.ApiFunction; +import com.google.api.core.ApiFuture; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.GaxProperties; +import com.google.api.gax.core.GoogleCredentialsProvider; +import com.google.api.gax.core.InstantiatingExecutorProvider; +import com.google.api.gax.grpc.GaxGrpcProperties; +import com.google.api.gax.grpc.GrpcTransportChannel; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.grpc.ProtoOperationTransformers; +import com.google.api.gax.longrunning.OperationSnapshot; +import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; +import com.google.api.gax.retrying.RetrySettings; +import com.google.api.gax.rpc.ApiCallContext; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.OperationCallSettings; +import com.google.api.gax.rpc.PageContext; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.PagedListDescriptor; +import com.google.api.gax.rpc.PagedListResponseFactory; +import com.google.api.gax.rpc.StatusCode; +import com.google.api.gax.rpc.StubSettings; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest; +import com.google.cloud.dataproc.v1.ListWorkflowTemplatesResponse; +import 
com.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1.WorkflowMetadata; +import com.google.cloud.dataproc.v1.WorkflowTemplate; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.longrunning.Operation; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import javax.annotation.Generated; +import org.threeten.bp.Duration; + +// AUTO-GENERATED DOCUMENTATION AND CLASS +/** + * Settings class to configure an instance of {@link WorkflowTemplateServiceStub}. + * + *

The default instance has everything set to sensible defaults: + * + *

    + *
  • The default service address (dataproc.googleapis.com) and default port (443) are used. + *
  • Credentials are acquired automatically through Application Default Credentials. + *
  • Retries are configured for idempotent methods but not for non-idempotent methods. + *
+ * + *

The builder of this class is recursive, so contained classes are themselves builders. When + * build() is called, the tree of builders is called to create the complete settings object. For + * example, to set the total timeout of createWorkflowTemplate to 30 seconds: + * + *

+ * 
+ * WorkflowTemplateServiceStubSettings.Builder workflowTemplateServiceSettingsBuilder =
+ *     WorkflowTemplateServiceStubSettings.newBuilder();
+ * workflowTemplateServiceSettingsBuilder.createWorkflowTemplateSettings().getRetrySettings().toBuilder()
+ *     .setTotalTimeout(Duration.ofSeconds(30));
+ * WorkflowTemplateServiceStubSettings workflowTemplateServiceSettings = workflowTemplateServiceSettingsBuilder.build();
+ * 
+ * 
+ */ +@Generated("by gapic-generator") +@BetaApi +public class WorkflowTemplateServiceStubSettings + extends StubSettings { + /** The default scopes of the service. */ + private static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder().add("https://www.googleapis.com/auth/cloud-platform").build(); + + private final UnaryCallSettings + createWorkflowTemplateSettings; + private final UnaryCallSettings + getWorkflowTemplateSettings; + private final UnaryCallSettings + instantiateWorkflowTemplateSettings; + private final OperationCallSettings + instantiateWorkflowTemplateOperationSettings; + private final UnaryCallSettings + instantiateInlineWorkflowTemplateSettings; + private final OperationCallSettings< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings; + private final UnaryCallSettings + updateWorkflowTemplateSettings; + private final PagedCallSettings< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + listWorkflowTemplatesSettings; + private final UnaryCallSettings + deleteWorkflowTemplateSettings; + + /** Returns the object with the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings + createWorkflowTemplateSettings() { + return createWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings + getWorkflowTemplateSettings() { + return getWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ + public UnaryCallSettings + instantiateWorkflowTemplateSettings() { + return instantiateWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. 
*/ + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallSettings + instantiateWorkflowTemplateOperationSettings() { + return instantiateWorkflowTemplateOperationSettings; + } + + /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ + public UnaryCallSettings + instantiateInlineWorkflowTemplateSettings() { + return instantiateInlineWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallSettings + instantiateInlineWorkflowTemplateOperationSettings() { + return instantiateInlineWorkflowTemplateOperationSettings; + } + + /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ + public UnaryCallSettings + updateWorkflowTemplateSettings() { + return updateWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to listWorkflowTemplates. */ + public PagedCallSettings< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + listWorkflowTemplatesSettings() { + return listWorkflowTemplatesSettings; + } + + /** Returns the object with the settings used for calls to deleteWorkflowTemplate. 
*/ + public UnaryCallSettings deleteWorkflowTemplateSettings() { + return deleteWorkflowTemplateSettings; + } + + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + public WorkflowTemplateServiceStub createStub() throws IOException { + if (getTransportChannelProvider() + .getTransportName() + .equals(GrpcTransportChannel.getGrpcTransportName())) { + return GrpcWorkflowTemplateServiceStub.create(this); + } else { + throw new UnsupportedOperationException( + "Transport not supported: " + getTransportChannelProvider().getTransportName()); + } + } + + /** Returns a builder for the default ExecutorProvider for this service. */ + public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { + return InstantiatingExecutorProvider.newBuilder(); + } + + /** Returns the default service endpoint. */ + public static String getDefaultEndpoint() { + return "dataproc.googleapis.com:443"; + } + + /** Returns the default service scopes. */ + public static List getDefaultServiceScopes() { + return DEFAULT_SERVICE_SCOPES; + } + + /** Returns a builder for the default credentials for this service. */ + public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { + return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES); + } + + /** Returns a builder for the default ChannelProvider for this service. 
*/ + public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { + return InstantiatingGrpcChannelProvider.newBuilder(); + } + + public static TransportChannelProvider defaultTransportChannelProvider() { + return defaultGrpcTransportProviderBuilder().build(); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { + return ApiClientHeaderProvider.newBuilder() + .setGeneratedLibToken( + "gapic", GaxProperties.getLibraryVersion(WorkflowTemplateServiceStubSettings.class)) + .setTransportToken( + GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder() { + return Builder.createDefault(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder(ClientContext clientContext) { + return new Builder(clientContext); + } + + /** Returns a builder containing all the values of this settings class. 
*/ + public Builder toBuilder() { + return new Builder(this); + } + + protected WorkflowTemplateServiceStubSettings(Builder settingsBuilder) throws IOException { + super(settingsBuilder); + + createWorkflowTemplateSettings = settingsBuilder.createWorkflowTemplateSettings().build(); + getWorkflowTemplateSettings = settingsBuilder.getWorkflowTemplateSettings().build(); + instantiateWorkflowTemplateSettings = + settingsBuilder.instantiateWorkflowTemplateSettings().build(); + instantiateWorkflowTemplateOperationSettings = + settingsBuilder.instantiateWorkflowTemplateOperationSettings().build(); + instantiateInlineWorkflowTemplateSettings = + settingsBuilder.instantiateInlineWorkflowTemplateSettings().build(); + instantiateInlineWorkflowTemplateOperationSettings = + settingsBuilder.instantiateInlineWorkflowTemplateOperationSettings().build(); + updateWorkflowTemplateSettings = settingsBuilder.updateWorkflowTemplateSettings().build(); + listWorkflowTemplatesSettings = settingsBuilder.listWorkflowTemplatesSettings().build(); + deleteWorkflowTemplateSettings = settingsBuilder.deleteWorkflowTemplateSettings().build(); + } + + private static final PagedListDescriptor< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> + LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC = + new PagedListDescriptor< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListWorkflowTemplatesRequest injectToken( + ListWorkflowTemplatesRequest payload, String token) { + return ListWorkflowTemplatesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListWorkflowTemplatesRequest injectPageSize( + ListWorkflowTemplatesRequest payload, int pageSize) { + return ListWorkflowTemplatesRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListWorkflowTemplatesRequest payload) { + return 
payload.getPageSize(); + } + + @Override + public String extractNextToken(ListWorkflowTemplatesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListWorkflowTemplatesResponse payload) { + return payload.getTemplatesList() != null + ? payload.getTemplatesList() + : ImmutableList.of(); + } + }; + + private static final PagedListResponseFactory< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListWorkflowTemplatesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> + pageContext = + PageContext.create( + callable, LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC, request, context); + return ListWorkflowTemplatesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + /** Builder for WorkflowTemplateServiceStubSettings. 
*/ + public static class Builder + extends StubSettings.Builder { + private final ImmutableList> unaryMethodSettingsBuilders; + + private final UnaryCallSettings.Builder + createWorkflowTemplateSettings; + private final UnaryCallSettings.Builder + getWorkflowTemplateSettings; + private final UnaryCallSettings.Builder + instantiateWorkflowTemplateSettings; + private final OperationCallSettings.Builder< + InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateWorkflowTemplateOperationSettings; + private final UnaryCallSettings.Builder + instantiateInlineWorkflowTemplateSettings; + private final OperationCallSettings.Builder< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings; + private final UnaryCallSettings.Builder + updateWorkflowTemplateSettings; + private final PagedCallSettings.Builder< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + listWorkflowTemplatesSettings; + private final UnaryCallSettings.Builder + deleteWorkflowTemplateSettings; + + private static final ImmutableMap> + RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = + ImmutableMap.builder(); + definitions.put( + "idempotent", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.INTERNAL, + StatusCode.Code.UNAVAILABLE))); + definitions.put( + "non_idempotent", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetrySettings settings = null; + settings = + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(100L)) + .setRetryDelayMultiplier(1.3) + .setMaxRetryDelay(Duration.ofMillis(60000L)) + .setInitialRpcTimeout(Duration.ofMillis(20000L)) + 
.setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ofMillis(20000L)) + .setTotalTimeout(Duration.ofMillis(600000L)) + .build(); + definitions.put("default", settings); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + protected Builder() { + this((ClientContext) null); + } + + protected Builder(ClientContext clientContext) { + super(clientContext); + + createWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + getWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + instantiateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + instantiateWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); + + instantiateInlineWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + instantiateInlineWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); + + updateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + listWorkflowTemplatesSettings = + PagedCallSettings.newBuilder(LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT); + + deleteWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createWorkflowTemplateSettings, + getWorkflowTemplateSettings, + instantiateWorkflowTemplateSettings, + instantiateInlineWorkflowTemplateSettings, + updateWorkflowTemplateSettings, + listWorkflowTemplatesSettings, + deleteWorkflowTemplateSettings); + + initDefaults(this); + } + + private static Builder createDefault() { + Builder builder = new Builder((ClientContext) null); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); + builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); + builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); + builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); + } + + private static Builder initDefaults(Builder builder) { + + 
builder + .createWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + + builder + .getWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + + builder + .instantiateWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + + builder + .instantiateInlineWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + + builder + .updateWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + + builder + .listWorkflowTemplatesSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + + builder + .deleteWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + builder + .instantiateWorkflowTemplateOperationSettings() + .setInitialCallSettings( + UnaryCallSettings + . 
+ newUnaryCallSettingsBuilder() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")) + .build()) + .setResponseTransformer( + ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) + .setMetadataTransformer( + ProtoOperationTransformers.MetadataTransformer.create(WorkflowMetadata.class)) + .setPollingAlgorithm( + OperationTimedPollAlgorithm.create( + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(1000L)) + .setRetryDelayMultiplier(2.0) + .setMaxRetryDelay(Duration.ofMillis(10000L)) + .setInitialRpcTimeout(Duration.ZERO) // ignored + .setRpcTimeoutMultiplier(1.0) // ignored + .setMaxRpcTimeout(Duration.ZERO) // ignored + .setTotalTimeout(Duration.ofMillis(43200000L)) + .build())); + builder + .instantiateInlineWorkflowTemplateOperationSettings() + .setInitialCallSettings( + UnaryCallSettings + . + newUnaryCallSettingsBuilder() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")) + .build()) + .setResponseTransformer( + ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) + .setMetadataTransformer( + ProtoOperationTransformers.MetadataTransformer.create(WorkflowMetadata.class)) + .setPollingAlgorithm( + OperationTimedPollAlgorithm.create( + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(1000L)) + .setRetryDelayMultiplier(2.0) + .setMaxRetryDelay(Duration.ofMillis(10000L)) + .setInitialRpcTimeout(Duration.ZERO) // ignored + .setRpcTimeoutMultiplier(1.0) // ignored + .setMaxRpcTimeout(Duration.ZERO) // ignored + .setTotalTimeout(Duration.ofMillis(43200000L)) + .build())); + + return builder; + } + + protected Builder(WorkflowTemplateServiceStubSettings settings) { + super(settings); + + createWorkflowTemplateSettings = settings.createWorkflowTemplateSettings.toBuilder(); + getWorkflowTemplateSettings = settings.getWorkflowTemplateSettings.toBuilder(); + 
instantiateWorkflowTemplateSettings = + settings.instantiateWorkflowTemplateSettings.toBuilder(); + instantiateWorkflowTemplateOperationSettings = + settings.instantiateWorkflowTemplateOperationSettings.toBuilder(); + instantiateInlineWorkflowTemplateSettings = + settings.instantiateInlineWorkflowTemplateSettings.toBuilder(); + instantiateInlineWorkflowTemplateOperationSettings = + settings.instantiateInlineWorkflowTemplateOperationSettings.toBuilder(); + updateWorkflowTemplateSettings = settings.updateWorkflowTemplateSettings.toBuilder(); + listWorkflowTemplatesSettings = settings.listWorkflowTemplatesSettings.toBuilder(); + deleteWorkflowTemplateSettings = settings.deleteWorkflowTemplateSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createWorkflowTemplateSettings, + getWorkflowTemplateSettings, + instantiateWorkflowTemplateSettings, + instantiateInlineWorkflowTemplateSettings, + updateWorkflowTemplateSettings, + listWorkflowTemplatesSettings, + deleteWorkflowTemplateSettings); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' + /** + * Applies the given settings updater function to all of the unary API methods in this service. + * + *

Note: This method does not support applying settings to streaming methods. + */ + public Builder applyToAllUnaryMethods( + ApiFunction, Void> settingsUpdater) throws Exception { + super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); + return this; + } + + public ImmutableList> unaryMethodSettingsBuilders() { + return unaryMethodSettingsBuilders; + } + + /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings.Builder + createWorkflowTemplateSettings() { + return createWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings.Builder + getWorkflowTemplateSettings() { + return getWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ + public UnaryCallSettings.Builder + instantiateWorkflowTemplateSettings() { + return instantiateWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ + @BetaApi( + "The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallSettings.Builder< + InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateWorkflowTemplateOperationSettings() { + return instantiateWorkflowTemplateOperationSettings; + } + + /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */ + public UnaryCallSettings.Builder + instantiateInlineWorkflowTemplateSettings() { + return instantiateInlineWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. 
*/ + @BetaApi( + "The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallSettings.Builder< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings() { + return instantiateInlineWorkflowTemplateOperationSettings; + } + + /** Returns the builder for the settings used for calls to updateWorkflowTemplate. */ + public UnaryCallSettings.Builder + updateWorkflowTemplateSettings() { + return updateWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to listWorkflowTemplates. */ + public PagedCallSettings.Builder< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + listWorkflowTemplatesSettings() { + return listWorkflowTemplatesSettings; + } + + /** Returns the builder for the settings used for calls to deleteWorkflowTemplate. */ + public UnaryCallSettings.Builder + deleteWorkflowTemplateSettings() { + return deleteWorkflowTemplateSettings; + } + + @Override + public WorkflowTemplateServiceStubSettings build() throws IOException { + return new WorkflowTemplateServiceStubSettings(this); + } + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java index 4b7542fe9f0c..5c6a236cdef7 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java @@ -32,6 +32,7 @@ import com.google.longrunning.Operation; import com.google.longrunning.OperationsClient; import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; import java.io.IOException; import java.util.List; import 
java.util.concurrent.TimeUnit; @@ -293,6 +294,69 @@ public final UnaryCallable createClusterCallabl return stub.createClusterCallable(); } + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Updates a cluster in a project. + * + *

Sample code: + * + *


+   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
+   *   String projectId = "";
+   *   String region = "";
+   *   String clusterName = "";
+   *   Cluster cluster = Cluster.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Cluster response = clusterControllerClient.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get();
+   * }
+   * 
+ * + * @param projectId Required. The ID of the Google Cloud Platform project the cluster belongs to. + * @param region Required. The Cloud Dataproc region in which to handle the request. + * @param clusterName Required. The cluster name. + * @param cluster Required. The changes to the cluster. + * @param updateMask Required. Specifies the path, relative to `Cluster`, of the field to update. + * For example, to change the number of workers in a cluster to 5, the `update_mask` parameter + * would be specified as `config.worker_config.num_instances`, and the `PATCH` request body + * would specify the new value, as follows: + *

{ "config":{ "workerConfig":{ "numInstances":"5" } } } + *

Similarly, to change the number of preemptible workers in a cluster to 5, the + * `update_mask` parameter would be `config.secondary_worker_config.num_instances`, and the + * `PATCH` request body would be set as follows: + *

{ "config":{ "secondaryWorkerConfig":{ "numInstances":"5" } } } + * <strong>Note:</strong> currently only the following fields can be updated: + *

<table> <tr> + * <td><strong>Mask</strong></td><td><strong>Purpose</strong></td> + * </tr> <tr> <td>labels</td><td>Updates labels</td> + * </tr> <tr> + * <td>config.worker_config.num_instances</td><td>Resize primary worker + * group</td> </tr> <tr> + * <td>config.secondary_worker_config.num_instances</td><td>Resize secondary + * worker group</td> </tr> <tr> + * <td>config.lifecycle_config.auto_delete_ttl</td><td>Reset MAX TTL + * duration</td> </tr> <tr> + * <td>config.lifecycle_config.auto_delete_time</td><td>Update MAX TTL + * deletion timestamp</td> </tr> <tr> + * <td>config.lifecycle_config.idle_delete_ttl</td><td>Update Idle TTL + * duration</td> </tr> </table> + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture updateClusterAsync( + String projectId, String region, String clusterName, Cluster cluster, FieldMask updateMask) { + + UpdateClusterRequest request = + UpdateClusterRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setClusterName(clusterName) + .setCluster(cluster) + .setUpdateMask(updateMask) + .build(); + return updateClusterAsync(request); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Updates a cluster in a project. @@ -620,6 +684,53 @@ public final ListClustersPagedResponse listClusters(String projectId, String reg return listClusters(request); } + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists all regions/{region}/clusters in a project. + * + *

Sample code: + * + *


+   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
+   *   String projectId = "";
+   *   String region = "";
+   *   String filter = "";
+   *   for (Cluster element : clusterControllerClient.listClusters(projectId, region, filter).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ * + * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs + * to. + * @param region Required. The Cloud Dataproc region in which to handle the request. + * @param filter Optional. A filter constraining the clusters to list. Filters are case-sensitive + * and have the following syntax: + *

field = value [AND [field = value]] ... + *

where **field** is one of `status.state`, `clusterName`, or + * `labels.[KEY]`, and `[KEY]` is a label key. **value** can be `*` to + * match all values. `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` contains the + * `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` contains the `DELETING` and + * `ERROR` states. `clusterName` is the name of the cluster provided at creation time. Only + * the logical `AND` operator is supported; space-separated items are treated as having an + * implicit `AND` operator. + *

Example filter: + *

status.state = ACTIVE AND clusterName = mycluster AND labels.env = staging AND + * labels.starred = * + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListClustersPagedResponse listClusters( + String projectId, String region, String filter) { + ListClustersRequest request = + ListClustersRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setFilter(filter) + .build(); + return listClusters(request); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists all regions/{region}/clusters in a project. diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java index 8729b99b82ed..74c8461c03e8 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java @@ -345,6 +345,46 @@ public final ListJobsPagedResponse listJobs(String projectId, String region) { return listJobs(request); } + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Lists regions/{region}/jobs in a project. + * + *

Sample code: + * + *


+   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
+   *   String projectId = "";
+   *   String region = "";
+   *   String filter = "";
+   *   for (Job element : jobControllerClient.listJobs(projectId, region, filter).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * 
+ * + * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. + * @param region Required. The Cloud Dataproc region in which to handle the request. + * @param filter Optional. A filter constraining the jobs to list. Filters are case-sensitive and + * have the following syntax: + *

[field = value] AND [field [= value]] ... + *

where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a + * label key. **value** can be `*` to match all values. `status.state` can + * be either `ACTIVE` or `NON_ACTIVE`. Only the logical `AND` operator is supported; + * space-separated items are treated as having an implicit `AND` operator. + *

Example filter: + *

status.state = ACTIVE AND labels.env = staging AND labels.starred = * + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListJobsPagedResponse listJobs(String projectId, String region, String filter) { + ListJobsRequest request = + ListJobsRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setFilter(filter) + .build(); + return listJobs(request); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Lists regions/{region}/jobs in a project. diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java index 5d1bf1e1d638..d3a72f7906e2 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java @@ -34,6 +34,7 @@ import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; @@ -47,9 +48,9 @@ *

  * 
  * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+ *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
  *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
- *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
+ *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(formattedParent, template);
  * }
  * 
  * 
@@ -181,40 +182,9 @@ public final OperationsClient getOperationsClient() { * *

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
    *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
-   * }
-   * 
- * - * @param parent Required. The "resource name" of the region, as described in - * https://cloud.google.com/apis/design/resource_names of the form - * `projects/{project_id}/regions/{region}` - * @param template Required. The Dataproc workflow template to create. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate createWorkflowTemplate( - RegionName parent, WorkflowTemplate template) { - - CreateWorkflowTemplateRequest request = - CreateWorkflowTemplateRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) - .setTemplate(template) - .build(); - return createWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Creates new workflow template. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent.toString(), template);
+   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(formattedParent, template);
    * }
    * 
* @@ -239,10 +209,10 @@ public final WorkflowTemplate createWorkflowTemplate(String parent, WorkflowTemp * *

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
    *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
    *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
+   *     .setParent(formattedParent)
    *     .setTemplate(template)
    *     .build();
    *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(request);
@@ -264,10 +234,10 @@ public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateReque
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
    *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
    *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
+   *     .setParent(formattedParent)
    *     .setTemplate(template)
    *     .build();
    *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.createWorkflowTemplateCallable().futureCall(request);
@@ -291,37 +261,8 @@ public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateReque
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name);
-   * }
-   * 
- * - * @param name Required. The "resource name" of the workflow template, as described in - * https://cloud.google.com/apis/design/resource_names of the form - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate getWorkflowTemplate(WorkflowTemplateName name) { - - GetWorkflowTemplateRequest request = - GetWorkflowTemplateRequest.newBuilder() - .setName(name == null ? null : name.toString()) - .build(); - return getWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Retrieves the latest workflow template. - * - *

Can retrieve previously instantiated template by specifying optional version parameter. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name.toString());
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(formattedName);
    * }
    * 
* @@ -347,9 +288,9 @@ public final WorkflowTemplate getWorkflowTemplate(String name) { * *

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(request);
    * }
@@ -372,9 +313,9 @@ public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest req
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.getWorkflowTemplateCallable().futureCall(request);
    *   // Do something
@@ -409,33 +350,23 @@ public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest req
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   String instanceId = "";
-   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name, instanceId).get();
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(formattedName).get();
    * }
    * 
* * @param name Required. The "resource name" of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names of the form * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * @param instanceId Optional. A tag that prevents multiple concurrent workflow instances with the - * same tag from running. This mitigates risk of concurrent instances started due to retries. - *

It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - *

The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and - * hyphens (-). The maximum length is 40 characters. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @BetaApi( "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( - WorkflowTemplateName name, String instanceId) { + String name) { InstantiateWorkflowTemplateRequest request = - InstantiateWorkflowTemplateRequest.newBuilder() - .setName(name == null ? null : name.toString()) - .setInstanceId(instanceId) - .build(); + InstantiateWorkflowTemplateRequest.newBuilder().setName(name).build(); return instantiateWorkflowTemplateAsync(request); } @@ -461,33 +392,22 @@ public final OperationFuture instantiateWorkflowTemplat * *


    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   String instanceId = "";
-   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name.toString(), instanceId).get();
+   *   Map<String, String> parameters = new HashMap<>();
+   *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(parameters).get();
    * }
    * 
* - * @param name Required. The "resource name" of the workflow template, as described in - * https://cloud.google.com/apis/design/resource_names of the form - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * @param instanceId Optional. A tag that prevents multiple concurrent workflow instances with the - * same tag from running. This mitigates risk of concurrent instances started due to retries. - *

It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - *

The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and - * hyphens (-). The maximum length is 40 characters. + * @param parameters Optional. Map from parameter names to values that should be used for those + * parameters. Values may not exceed 100 characters. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @BetaApi( "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( - String name, String instanceId) { + Map parameters) { InstantiateWorkflowTemplateRequest request = - InstantiateWorkflowTemplateRequest.newBuilder() - .setName(name) - .setInstanceId(instanceId) - .build(); + InstantiateWorkflowTemplateRequest.newBuilder().putAllParameters(parameters).build(); return instantiateWorkflowTemplateAsync(request); } @@ -513,9 +433,9 @@ public final OperationFuture instantiateWorkflowTemplat * *


    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   Empty response = workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(request).get();
    * }
@@ -553,9 +473,9 @@ public final OperationFuture instantiateWorkflowTemplat
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   OperationFuture<Operation> future = workflowTemplateServiceClient.instantiateWorkflowTemplateOperationCallable().futureCall(request);
    *   // Do something
@@ -591,9 +511,9 @@ public final OperationFuture instantiateWorkflowTemplat
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateWorkflowTemplateCallable().futureCall(request);
    *   // Do something
@@ -606,6 +526,194 @@ public final OperationFuture instantiateWorkflowTemplat
     return stub.instantiateWorkflowTemplateCallable();
   }
 
+  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  /**
+   * Instantiates a template and begins execution.
+   *
+   * 

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   Empty response = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(formattedParent, template).get();
+   * }
+   * 
+ * + * @param parent Required. The "resource name" of the workflow template region, as described in + * https://cloud.google.com/apis/design/resource_names of the form + * `projects/{project_id}/regions/{region}` + * @param template Required. The workflow template to instantiate. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateInlineWorkflowTemplateAsync( + String parent, WorkflowTemplate template) { + + InstantiateInlineWorkflowTemplateRequest request = + InstantiateInlineWorkflowTemplateRequest.newBuilder() + .setParent(parent) + .setTemplate(template) + .build(); + return instantiateInlineWorkflowTemplateAsync(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   Empty response = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(request).get();
+   * }
+   * 
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + public final OperationFuture instantiateInlineWorkflowTemplateAsync( + InstantiateInlineWorkflowTemplateRequest request) { + return instantiateInlineWorkflowTemplateOperationCallable().futureCall(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   OperationFuture<Operation> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateOperationCallable().futureCall(request);
+   *   // Do something
+   *   Empty response = future.get();
+   * }
+   * 
+ */ + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public final OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + return stub.instantiateInlineWorkflowTemplateOperationCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD + /** + * Instantiates a template and begins execution. + * + *

This method is equivalent to executing the sequence + * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. + * + *

The returned Operation can be used to track execution of workflow by polling + * [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when + * entire workflow is finished. + * + *

The running workflow can be aborted via + * [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any + * inflight jobs to be cancelled and workflow-owned clusters to be deleted. + * + *

The [Operation.metadata][google.longrunning.Operation.metadata] will be + * [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + * + *

On successful completion, [Operation.response][google.longrunning.Operation.response] will + * be [Empty][google.protobuf.Empty]. + * + *

Sample code: + * + *


+   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
+   *     .setParent(formattedParent)
+   *     .setTemplate(template)
+   *     .build();
+   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateCallable().futureCall(request);
+   *   // Do something
+   *   future.get();
+   * }
+   * 
+ */ + public final UnaryCallable + instantiateInlineWorkflowTemplateCallable() { + return stub.instantiateInlineWorkflowTemplateCallable(); + } + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Updates (replaces) workflow template. The updated template must contain version that matches @@ -687,36 +795,8 @@ public final WorkflowTemplate updateWorkflowTemplate(UpdateWorkflowTemplateReque * *

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * - * @param parent Required. The "resource name" of the region, as described in - * https://cloud.google.com/apis/design/resource_names of the form - * `projects/{project_id}/regions/{region}` - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName parent) { - ListWorkflowTemplatesRequest request = - ListWorkflowTemplatesRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) - .build(); - return listWorkflowTemplates(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Lists workflows that match the specified filter in the request. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent.toString()).iterateAll()) {
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
+   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(formattedParent).iterateAll()) {
    *     // doThingsWith(element);
    *   }
    * }
@@ -741,9 +821,9 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(String par
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
    *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
+   *     .setParent(formattedParent)
    *     .build();
    *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(request).iterateAll()) {
    *     // doThingsWith(element);
@@ -767,9 +847,9 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
    *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
+   *     .setParent(formattedParent)
    *     .build();
    *   ApiFuture<ListWorkflowTemplatesPagedResponse> future = workflowTemplateServiceClient.listWorkflowTemplatesPagedCallable().futureCall(request);
    *   // Do something
@@ -792,9 +872,9 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+   *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
    *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
+   *     .setParent(formattedParent)
    *     .build();
    *   while (true) {
    *     ListWorkflowTemplatesResponse response = workflowTemplateServiceClient.listWorkflowTemplatesCallable().call(request);
@@ -824,35 +904,8 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(name);
-   * }
-   * 
- * - * @param name Required. The "resource name" of the workflow template, as described in - * https://cloud.google.com/apis/design/resource_names of the form - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final void deleteWorkflowTemplate(WorkflowTemplateName name) { - - DeleteWorkflowTemplateRequest request = - DeleteWorkflowTemplateRequest.newBuilder() - .setName(name == null ? null : name.toString()) - .build(); - deleteWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Deletes a workflow template. It does not cancel in-progress workflows. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(name.toString());
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   workflowTemplateServiceClient.deleteWorkflowTemplate(formattedName);
    * }
    * 
* @@ -876,9 +929,9 @@ public final void deleteWorkflowTemplate(String name) { * *

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   workflowTemplateServiceClient.deleteWorkflowTemplate(request);
    * }
@@ -899,9 +952,9 @@ public final void deleteWorkflowTemplate(DeleteWorkflowTemplateRequest request)
    *
    * 

    * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+   *   String formattedName = WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
+   *     .setName(formattedName)
    *     .build();
    *   ApiFuture<Void> future = workflowTemplateServiceClient.deleteWorkflowTemplateCallable().futureCall(request);
    *   // Do something
diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java
index 62611fdf75cc..3c7ebf62eee9 100644
--- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java
+++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java
@@ -95,6 +95,22 @@ public class WorkflowTemplateServiceSettings
         .instantiateWorkflowTemplateOperationSettings();
   }
 
+  /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */
+  public UnaryCallSettings
+      instantiateInlineWorkflowTemplateSettings() {
+    return ((WorkflowTemplateServiceStubSettings) getStubSettings())
+        .instantiateInlineWorkflowTemplateSettings();
+  }
+
+  /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */
+  @BetaApi(
+      "The surface for long-running operations is not stable yet and may change in the future.")
+  public OperationCallSettings
+      instantiateInlineWorkflowTemplateOperationSettings() {
+    return ((WorkflowTemplateServiceStubSettings) getStubSettings())
+        .instantiateInlineWorkflowTemplateOperationSettings();
+  }
+
   /** Returns the object with the settings used for calls to updateWorkflowTemplate. */
   public UnaryCallSettings
       updateWorkflowTemplateSettings() {
@@ -242,6 +258,21 @@ public Builder applyToAllUnaryMethods(
       return getStubSettingsBuilder().instantiateWorkflowTemplateOperationSettings();
     }
 
+    /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */
+    public UnaryCallSettings.Builder
+        instantiateInlineWorkflowTemplateSettings() {
+      return getStubSettingsBuilder().instantiateInlineWorkflowTemplateSettings();
+    }
+
+    /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */
+    @BetaApi(
+        "The surface for long-running operations is not stable yet and may change in the future.")
+    public OperationCallSettings.Builder<
+            InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata>
+        instantiateInlineWorkflowTemplateOperationSettings() {
+      return getStubSettingsBuilder().instantiateInlineWorkflowTemplateOperationSettings();
+    }
+
     /** Returns the builder for the settings used for calls to updateWorkflowTemplate. */
     public UnaryCallSettings.Builder
         updateWorkflowTemplateSettings() {
diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java
index b648f64c96e4..4a8f89efb1f2 100644
--- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java
+++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java
@@ -64,9 +64,9 @@
  * 
  * 
  * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+ *   String formattedParent = RegionName.format("[PROJECT]", "[REGION]");
  *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
- *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
+ *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(formattedParent, template);
  * }
  * 
  * 
diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java index 5e1da5df3131..8b55df3b079b 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java @@ -342,8 +342,12 @@ public static class Builder extends StubSettings.BuildernewArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.newArrayList())); + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.INTERNAL, + StatusCode.Code.UNAVAILABLE))); + definitions.put( + "non_idempotent", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -428,7 +432,7 @@ private static Builder initDefaults(Builder builder) { builder .deleteClusterSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); builder @@ -466,7 +470,7 @@ private static Builder initDefaults(Builder builder) { .setInitialRpcTimeout(Duration.ZERO) // ignored .setRpcTimeoutMultiplier(1.0) // ignored .setMaxRpcTimeout(Duration.ZERO) // ignored - .setTotalTimeout(Duration.ofMillis(300000L)) + .setTotalTimeout(Duration.ofMillis(900000L)) .build())); builder .updateClusterOperationSettings() @@ -489,7 +493,7 @@ private static Builder initDefaults(Builder builder) { .setInitialRpcTimeout(Duration.ZERO) // ignored .setRpcTimeoutMultiplier(1.0) // ignored .setMaxRpcTimeout(Duration.ZERO) // ignored - 
.setTotalTimeout(Duration.ofMillis(300000L)) + .setTotalTimeout(Duration.ofMillis(900000L)) .build())); builder .deleteClusterOperationSettings() @@ -512,7 +516,7 @@ private static Builder initDefaults(Builder builder) { .setInitialRpcTimeout(Duration.ZERO) // ignored .setRpcTimeoutMultiplier(1.0) // ignored .setMaxRpcTimeout(Duration.ZERO) // ignored - .setTotalTimeout(Duration.ofMillis(300000L)) + .setTotalTimeout(Duration.ofMillis(900000L)) .build())); builder .diagnoseClusterOperationSettings() diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java index ca8eae106bd4..73257a772cd5 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java @@ -28,6 +28,7 @@ import com.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest; import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse; @@ -83,6 +84,17 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub ProtoUtils.marshaller(InstantiateWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor + instantiateInlineWorkflowTemplateMethodDescriptor = 
+ MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller( + InstantiateInlineWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) + .build(); private static final MethodDescriptor updateWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -126,6 +138,10 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub instantiateWorkflowTemplateCallable; private final OperationCallable instantiateWorkflowTemplateOperationCallable; + private final UnaryCallable + instantiateInlineWorkflowTemplateCallable; + private final OperationCallable + instantiateInlineWorkflowTemplateOperationCallable; private final UnaryCallable updateWorkflowTemplateCallable; private final UnaryCallable @@ -192,6 +208,11 @@ protected GrpcWorkflowTemplateServiceStub( GrpcCallSettings.newBuilder() .setMethodDescriptor(instantiateWorkflowTemplateMethodDescriptor) .build(); + GrpcCallSettings + instantiateInlineWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(instantiateInlineWorkflowTemplateMethodDescriptor) + .build(); GrpcCallSettings updateWorkflowTemplateTransportSettings = GrpcCallSettings.newBuilder() @@ -229,6 +250,17 @@ protected GrpcWorkflowTemplateServiceStub( settings.instantiateWorkflowTemplateOperationSettings(), clientContext, this.operationsStub); + this.instantiateInlineWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + instantiateInlineWorkflowTemplateTransportSettings, + settings.instantiateInlineWorkflowTemplateSettings(), + clientContext); + this.instantiateInlineWorkflowTemplateOperationCallable = + callableFactory.createOperationCallable( + instantiateInlineWorkflowTemplateTransportSettings, + 
settings.instantiateInlineWorkflowTemplateOperationSettings(), + clientContext, + this.operationsStub); this.updateWorkflowTemplateCallable = callableFactory.createUnaryCallable( updateWorkflowTemplateTransportSettings, @@ -278,6 +310,17 @@ public UnaryCallable getWorkflowTe return instantiateWorkflowTemplateCallable; } + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + return instantiateInlineWorkflowTemplateOperationCallable; + } + + public UnaryCallable + instantiateInlineWorkflowTemplateCallable() { + return instantiateInlineWorkflowTemplateCallable; + } + public UnaryCallable updateWorkflowTemplateCallable() { return updateWorkflowTemplateCallable; diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java index e645d2d7e28f..26e66244b93e 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java @@ -280,8 +280,12 @@ public static class Builder extends StubSettings.BuildernewArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.newArrayList())); + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.INTERNAL, + StatusCode.Code.UNAVAILABLE))); + definitions.put( + "non_idempotent", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -368,12 +372,12 @@ private static Builder initDefaults(Builder builder) { builder .cancelJobSettings() - 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); builder .deleteJobSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); return builder; diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java index f54b4da975a5..b4b0caf43ea4 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java @@ -24,6 +24,7 @@ import com.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest; import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse; @@ -72,6 +73,19 @@ public UnaryCallable getWorkflowTe "Not implemented: instantiateWorkflowTemplateCallable()"); } + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + throw new UnsupportedOperationException( + "Not implemented: instantiateInlineWorkflowTemplateOperationCallable()"); + } + + public 
UnaryCallable + instantiateInlineWorkflowTemplateCallable() { + throw new UnsupportedOperationException( + "Not implemented: instantiateInlineWorkflowTemplateCallable()"); + } + public UnaryCallable updateWorkflowTemplateCallable() { throw new UnsupportedOperationException("Not implemented: updateWorkflowTemplateCallable()"); diff --git a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java index 07e55b175947..ad6a8c0b8728 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java +++ b/google-cloud-clients/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java @@ -46,6 +46,7 @@ import com.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest; +import com.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest; import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest; import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse; @@ -105,6 +106,11 @@ public class WorkflowTemplateServiceStubSettings instantiateWorkflowTemplateSettings; private final OperationCallSettings instantiateWorkflowTemplateOperationSettings; + private final UnaryCallSettings + instantiateInlineWorkflowTemplateSettings; + private final OperationCallSettings< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings; private final UnaryCallSettings updateWorkflowTemplateSettings; private final 
PagedCallSettings< @@ -139,6 +145,19 @@ public class WorkflowTemplateServiceStubSettings return instantiateWorkflowTemplateOperationSettings; } + /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ + public UnaryCallSettings + instantiateInlineWorkflowTemplateSettings() { + return instantiateInlineWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ + @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallSettings + instantiateInlineWorkflowTemplateOperationSettings() { + return instantiateInlineWorkflowTemplateOperationSettings; + } + /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings updateWorkflowTemplateSettings() { @@ -232,6 +251,10 @@ protected WorkflowTemplateServiceStubSettings(Builder settingsBuilder) throws IO settingsBuilder.instantiateWorkflowTemplateSettings().build(); instantiateWorkflowTemplateOperationSettings = settingsBuilder.instantiateWorkflowTemplateOperationSettings().build(); + instantiateInlineWorkflowTemplateSettings = + settingsBuilder.instantiateInlineWorkflowTemplateSettings().build(); + instantiateInlineWorkflowTemplateOperationSettings = + settingsBuilder.instantiateInlineWorkflowTemplateOperationSettings().build(); updateWorkflowTemplateSettings = settingsBuilder.updateWorkflowTemplateSettings().build(); listWorkflowTemplatesSettings = settingsBuilder.listWorkflowTemplatesSettings().build(); deleteWorkflowTemplateSettings = settingsBuilder.deleteWorkflowTemplateSettings().build(); @@ -314,6 +337,11 @@ public static class Builder private final OperationCallSettings.Builder< InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateWorkflowTemplateOperationSettings; + private final UnaryCallSettings.Builder + instantiateInlineWorkflowTemplateSettings; + private final 
OperationCallSettings.Builder< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings; private final UnaryCallSettings.Builder updateWorkflowTemplateSettings; private final PagedCallSettings.Builder< @@ -333,8 +361,12 @@ public static class Builder "idempotent", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.newArrayList())); + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.INTERNAL, + StatusCode.Code.UNAVAILABLE))); + definitions.put( + "non_idempotent", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -372,6 +404,10 @@ protected Builder(ClientContext clientContext) { instantiateWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); + instantiateInlineWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + + instantiateInlineWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); + updateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listWorkflowTemplatesSettings = @@ -384,6 +420,7 @@ protected Builder(ClientContext clientContext) { createWorkflowTemplateSettings, getWorkflowTemplateSettings, instantiateWorkflowTemplateSettings, + instantiateInlineWorkflowTemplateSettings, updateWorkflowTemplateSettings, listWorkflowTemplatesSettings, deleteWorkflowTemplateSettings); @@ -417,9 +454,14 @@ private static Builder initDefaults(Builder builder) { .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + builder + .instantiateInlineWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); + builder .updateWorkflowTemplateSettings() - 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); builder @@ -429,7 +471,7 @@ private static Builder initDefaults(Builder builder) { builder .deleteWorkflowTemplateSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")); builder .instantiateWorkflowTemplateOperationSettings() @@ -455,6 +497,30 @@ private static Builder initDefaults(Builder builder) { .setMaxRpcTimeout(Duration.ZERO) // ignored .setTotalTimeout(Duration.ofMillis(43200000L)) .build())); + builder + .instantiateInlineWorkflowTemplateOperationSettings() + .setInitialCallSettings( + UnaryCallSettings + . + newUnaryCallSettingsBuilder() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default")) + .build()) + .setResponseTransformer( + ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) + .setMetadataTransformer( + ProtoOperationTransformers.MetadataTransformer.create(WorkflowMetadata.class)) + .setPollingAlgorithm( + OperationTimedPollAlgorithm.create( + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(1000L)) + .setRetryDelayMultiplier(2.0) + .setMaxRetryDelay(Duration.ofMillis(10000L)) + .setInitialRpcTimeout(Duration.ZERO) // ignored + .setRpcTimeoutMultiplier(1.0) // ignored + .setMaxRpcTimeout(Duration.ZERO) // ignored + .setTotalTimeout(Duration.ofMillis(43200000L)) + .build())); return builder; } @@ -468,6 +534,10 @@ protected Builder(WorkflowTemplateServiceStubSettings settings) { settings.instantiateWorkflowTemplateSettings.toBuilder(); instantiateWorkflowTemplateOperationSettings = settings.instantiateWorkflowTemplateOperationSettings.toBuilder(); + instantiateInlineWorkflowTemplateSettings = + 
settings.instantiateInlineWorkflowTemplateSettings.toBuilder(); + instantiateInlineWorkflowTemplateOperationSettings = + settings.instantiateInlineWorkflowTemplateOperationSettings.toBuilder(); updateWorkflowTemplateSettings = settings.updateWorkflowTemplateSettings.toBuilder(); listWorkflowTemplatesSettings = settings.listWorkflowTemplatesSettings.toBuilder(); deleteWorkflowTemplateSettings = settings.deleteWorkflowTemplateSettings.toBuilder(); @@ -477,6 +547,7 @@ protected Builder(WorkflowTemplateServiceStubSettings settings) { createWorkflowTemplateSettings, getWorkflowTemplateSettings, instantiateWorkflowTemplateSettings, + instantiateInlineWorkflowTemplateSettings, updateWorkflowTemplateSettings, listWorkflowTemplatesSettings, deleteWorkflowTemplateSettings); @@ -525,6 +596,21 @@ public Builder applyToAllUnaryMethods( return instantiateWorkflowTemplateOperationSettings; } + /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */ + public UnaryCallSettings.Builder + instantiateInlineWorkflowTemplateSettings() { + return instantiateInlineWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */ + @BetaApi( + "The surface for use by generated code is not stable yet and may change in the future.") + public OperationCallSettings.Builder< + InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> + instantiateInlineWorkflowTemplateOperationSettings() { + return instantiateInlineWorkflowTemplateOperationSettings; + } + /** Returns the builder for the settings used for calls to updateWorkflowTemplate. 
*/ public UnaryCallSettings.Builder updateWorkflowTemplateSettings() { diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java index fbd8158109b4..0f61c681688b 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java @@ -29,6 +29,7 @@ import com.google.longrunning.Operation; import com.google.protobuf.Any; import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; import com.google.protobuf.GeneratedMessageV3; import io.grpc.Status; import io.grpc.StatusRuntimeException; @@ -47,6 +48,7 @@ public class ClusterControllerClientTest { private static MockClusterController mockClusterController; private static MockJobController mockJobController; + private static MockWorkflowTemplateService mockWorkflowTemplateService; private static MockServiceHelper serviceHelper; private ClusterControllerClient client; private LocalChannelProvider channelProvider; @@ -55,10 +57,12 @@ public class ClusterControllerClientTest { public static void startStaticServer() { mockClusterController = new MockClusterController(); mockJobController = new MockJobController(); + mockWorkflowTemplateService = new MockWorkflowTemplateService(); serviceHelper = new MockServiceHelper( "in-process-1", - Arrays.asList(mockClusterController, mockJobController)); + Arrays.asList( + mockClusterController, mockJobController, mockWorkflowTemplateService)); serviceHelper.start(); } @@ -144,6 +148,73 @@ public void createClusterExceptionTest() throws Exception { } } + @Test + @SuppressWarnings("all") + public void updateClusterTest() throws Exception { + String projectId2 = "projectId2939242356"; + String 
clusterName2 = "clusterName2875867491"; + String clusterUuid = "clusterUuid-1017854240"; + Cluster expectedResponse = + Cluster.newBuilder() + .setProjectId(projectId2) + .setClusterName(clusterName2) + .setClusterUuid(clusterUuid) + .build(); + Operation resultOperation = + Operation.newBuilder() + .setName("updateClusterTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockClusterController.addResponse(resultOperation); + + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String clusterName = "clusterName-1018081872"; + Cluster cluster = Cluster.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + Cluster actualResponse = + client.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockClusterController.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + UpdateClusterRequest actualRequest = (UpdateClusterRequest) actualRequests.get(0); + + Assert.assertEquals(projectId, actualRequest.getProjectId()); + Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(clusterName, actualRequest.getClusterName()); + Assert.assertEquals(cluster, actualRequest.getCluster()); + Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void updateClusterExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockClusterController.addException(exception); + + try { + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String clusterName = "clusterName-1018081872"; + Cluster cluster = Cluster.newBuilder().build(); + 
FieldMask updateMask = FieldMask.newBuilder().build(); + + client.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + @Test @SuppressWarnings("all") public void deleteClusterTest() throws Exception { @@ -299,6 +370,60 @@ public void listClustersExceptionTest() throws Exception { } } + @Test + @SuppressWarnings("all") + public void listClustersTest2() { + String nextPageToken = ""; + Cluster clustersElement = Cluster.newBuilder().build(); + List clusters = Arrays.asList(clustersElement); + ListClustersResponse expectedResponse = + ListClustersResponse.newBuilder() + .setNextPageToken(nextPageToken) + .addAllClusters(clusters) + .build(); + mockClusterController.addResponse(expectedResponse); + + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region, filter); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); + + List actualRequests = mockClusterController.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListClustersRequest actualRequest = (ListClustersRequest) actualRequests.get(0); + + Assert.assertEquals(projectId, actualRequest.getProjectId()); + Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + 
GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void listClustersExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockClusterController.addException(exception); + + try { + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + client.listClusters(projectId, region, filter); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + @Test @SuppressWarnings("all") public void diagnoseClusterTest() throws Exception { diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java index b1f8ecd143eb..e166329af662 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java @@ -43,6 +43,7 @@ public class JobControllerClientTest { private static MockClusterController mockClusterController; private static MockJobController mockJobController; + private static MockWorkflowTemplateService mockWorkflowTemplateService; private static MockServiceHelper serviceHelper; private JobControllerClient client; private LocalChannelProvider channelProvider; @@ -51,10 +52,12 @@ public class JobControllerClientTest { public static void startStaticServer() { mockClusterController = new MockClusterController(); mockJobController = new MockJobController(); + mockWorkflowTemplateService = new MockWorkflowTemplateService(); serviceHelper = new MockServiceHelper( "in-process-1", - Arrays.asList(mockClusterController, mockJobController)); + Arrays.asList( + 
mockClusterController, mockJobController, mockWorkflowTemplateService)); serviceHelper.start(); } @@ -85,10 +88,12 @@ public void tearDown() throws Exception { public void submitJobTest() { String driverOutputResourceUri = "driverOutputResourceUri-542229086"; String driverControlFilesUri = "driverControlFilesUri207057643"; + String jobUuid = "jobUuid-1615012099"; Job expectedResponse = Job.newBuilder() .setDriverOutputResourceUri(driverOutputResourceUri) .setDriverControlFilesUri(driverControlFilesUri) + .setJobUuid(jobUuid) .build(); mockJobController.addResponse(expectedResponse); @@ -135,10 +140,12 @@ public void submitJobExceptionTest() throws Exception { public void getJobTest() { String driverOutputResourceUri = "driverOutputResourceUri-542229086"; String driverControlFilesUri = "driverControlFilesUri207057643"; + String jobUuid = "jobUuid-1615012099"; Job expectedResponse = Job.newBuilder() .setDriverOutputResourceUri(driverOutputResourceUri) .setDriverControlFilesUri(driverControlFilesUri) + .setJobUuid(jobUuid) .build(); mockJobController.addResponse(expectedResponse); @@ -228,15 +235,68 @@ public void listJobsExceptionTest() throws Exception { } } + @Test + @SuppressWarnings("all") + public void listJobsTest2() { + String nextPageToken = ""; + Job jobsElement = Job.newBuilder().build(); + List jobs = Arrays.asList(jobsElement); + ListJobsResponse expectedResponse = + ListJobsResponse.newBuilder().setNextPageToken(nextPageToken).addAllJobs(jobs).build(); + mockJobController.addResponse(expectedResponse); + + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region, filter); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); + + List actualRequests = 
mockJobController.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListJobsRequest actualRequest = (ListJobsRequest) actualRequests.get(0); + + Assert.assertEquals(projectId, actualRequest.getProjectId()); + Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void listJobsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockJobController.addException(exception); + + try { + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + client.listJobs(projectId, region, filter); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + @Test @SuppressWarnings("all") public void cancelJobTest() { String driverOutputResourceUri = "driverOutputResourceUri-542229086"; String driverControlFilesUri = "driverControlFilesUri207057643"; + String jobUuid = "jobUuid-1615012099"; Job expectedResponse = Job.newBuilder() .setDriverOutputResourceUri(driverOutputResourceUri) .setDriverControlFilesUri(driverControlFilesUri) + .setJobUuid(jobUuid) .build(); mockJobController.addResponse(expectedResponse); diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java new file mode 100644 index 000000000000..1e93124704c0 --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java @@ -0,0 +1,57 @@ +/* + * Copyright 2018 Google LLC + * + 
* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.dataproc.v1; + +import com.google.api.core.BetaApi; +import com.google.api.gax.grpc.testing.MockGrpcService; +import com.google.protobuf.GeneratedMessageV3; +import io.grpc.ServerServiceDefinition; +import java.util.List; + +@javax.annotation.Generated("by GAPIC") +@BetaApi +public class MockWorkflowTemplateService implements MockGrpcService { + private final MockWorkflowTemplateServiceImpl serviceImpl; + + public MockWorkflowTemplateService() { + serviceImpl = new MockWorkflowTemplateServiceImpl(); + } + + @Override + public List getRequests() { + return serviceImpl.getRequests(); + } + + @Override + public void addResponse(GeneratedMessageV3 response) { + serviceImpl.addResponse(response); + } + + @Override + public void addException(Exception exception) { + serviceImpl.addException(exception); + } + + @Override + public ServerServiceDefinition getServiceDefinition() { + return serviceImpl.bindService(); + } + + @Override + public void reset() { + serviceImpl.reset(); + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java new file mode 100644 index 000000000000..3ea7c8f95b88 --- /dev/null +++ 
b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java @@ -0,0 +1,167 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.dataproc.v1; + +import com.google.api.core.BetaApi; +import com.google.cloud.dataproc.v1.WorkflowTemplateServiceGrpc.WorkflowTemplateServiceImplBase; +import com.google.longrunning.Operation; +import com.google.protobuf.Empty; +import com.google.protobuf.GeneratedMessageV3; +import io.grpc.stub.StreamObserver; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; + +@javax.annotation.Generated("by GAPIC") +@BetaApi +public class MockWorkflowTemplateServiceImpl extends WorkflowTemplateServiceImplBase { + private ArrayList requests; + private Queue responses; + + public MockWorkflowTemplateServiceImpl() { + requests = new ArrayList<>(); + responses = new LinkedList<>(); + } + + public List getRequests() { + return requests; + } + + public void addResponse(GeneratedMessageV3 response) { + responses.add(response); + } + + public void setResponses(List responses) { + this.responses = new LinkedList(responses); + } + + public void addException(Exception exception) { + responses.add(exception); + } + + public void reset() { + requests = new ArrayList<>(); + responses = new LinkedList<>(); + } + + @Override + public void createWorkflowTemplate( + 
CreateWorkflowTemplateRequest request, StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof WorkflowTemplate) { + requests.add(request); + responseObserver.onNext((WorkflowTemplate) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + + @Override + public void getWorkflowTemplate( + GetWorkflowTemplateRequest request, StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof WorkflowTemplate) { + requests.add(request); + responseObserver.onNext((WorkflowTemplate) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + + @Override + public void instantiateWorkflowTemplate( + InstantiateWorkflowTemplateRequest request, StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof Operation) { + requests.add(request); + responseObserver.onNext((Operation) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + + @Override + public void instantiateInlineWorkflowTemplate( + InstantiateInlineWorkflowTemplateRequest request, + StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof Operation) { + requests.add(request); + responseObserver.onNext((Operation) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + 
responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + + @Override + public void updateWorkflowTemplate( + UpdateWorkflowTemplateRequest request, StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof WorkflowTemplate) { + requests.add(request); + responseObserver.onNext((WorkflowTemplate) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + + @Override + public void listWorkflowTemplates( + ListWorkflowTemplatesRequest request, + StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof ListWorkflowTemplatesResponse) { + requests.add(request); + responseObserver.onNext((ListWorkflowTemplatesResponse) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + + @Override + public void deleteWorkflowTemplate( + DeleteWorkflowTemplateRequest request, StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof Empty) { + requests.add(request); + responseObserver.onNext((Empty) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } +} diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java new file mode 100644 index 
000000000000..fe711cd57e90 --- /dev/null +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java @@ -0,0 +1,481 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.dataproc.v1; + +import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; + +import com.google.api.gax.core.NoCredentialsProvider; +import com.google.api.gax.grpc.GaxGrpcProperties; +import com.google.api.gax.grpc.testing.LocalChannelProvider; +import com.google.api.gax.grpc.testing.MockGrpcService; +import com.google.api.gax.grpc.testing.MockServiceHelper; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.InvalidArgumentException; +import com.google.api.gax.rpc.StatusCode; +import com.google.common.collect.Lists; +import com.google.longrunning.Operation; +import com.google.protobuf.Any; +import com.google.protobuf.Empty; +import com.google.protobuf.GeneratedMessageV3; +import io.grpc.Status; +import io.grpc.StatusRuntimeException; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + 
+@javax.annotation.Generated("by GAPIC") +public class WorkflowTemplateServiceClientTest { + private static MockClusterController mockClusterController; + private static MockJobController mockJobController; + private static MockWorkflowTemplateService mockWorkflowTemplateService; + private static MockServiceHelper serviceHelper; + private WorkflowTemplateServiceClient client; + private LocalChannelProvider channelProvider; + + @BeforeClass + public static void startStaticServer() { + mockClusterController = new MockClusterController(); + mockJobController = new MockJobController(); + mockWorkflowTemplateService = new MockWorkflowTemplateService(); + serviceHelper = + new MockServiceHelper( + "in-process-1", + Arrays.asList( + mockClusterController, mockJobController, mockWorkflowTemplateService)); + serviceHelper.start(); + } + + @AfterClass + public static void stopServer() { + serviceHelper.stop(); + } + + @Before + public void setUp() throws IOException { + serviceHelper.reset(); + channelProvider = serviceHelper.createChannelProvider(); + WorkflowTemplateServiceSettings settings = + WorkflowTemplateServiceSettings.newBuilder() + .setTransportChannelProvider(channelProvider) + .setCredentialsProvider(NoCredentialsProvider.create()) + .build(); + client = WorkflowTemplateServiceClient.create(settings); + } + + @After + public void tearDown() throws Exception { + client.close(); + } + + @Test + @SuppressWarnings("all") + public void createWorkflowTemplateTest() { + String id = "id3355"; + WorkflowTemplateName name = + WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + int version = 351608024; + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId(id) + .setName(name.toString()) + .setVersion(version) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + 
WorkflowTemplate actualResponse = client.createWorkflowTemplate(formattedParent, template); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateWorkflowTemplateRequest actualRequest = + (CreateWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedParent, actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void createWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + client.createWorkflowTemplate(formattedParent, template); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + + @Test + @SuppressWarnings("all") + public void getWorkflowTemplateTest() { + String id = "id3355"; + WorkflowTemplateName name2 = + WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + int version = 351608024; + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId(id) + .setName(name2.toString()) + .setVersion(version) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + WorkflowTemplate actualResponse = client.getWorkflowTemplate(formattedName); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = 
mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetWorkflowTemplateRequest actualRequest = (GetWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedName, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void getWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + client.getWorkflowTemplate(formattedName); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + + @Test + @SuppressWarnings("all") + public void instantiateWorkflowTemplateTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + Empty actualResponse = client.instantiateWorkflowTemplateAsync(formattedName).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateWorkflowTemplateRequest actualRequest = + (InstantiateWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedName, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + 
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void instantiateWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + client.instantiateWorkflowTemplateAsync(formattedName).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + @SuppressWarnings("all") + public void instantiateWorkflowTemplateTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateWorkflowTemplateTest2") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + Map parameters = new HashMap<>(); + + Empty actualResponse = client.instantiateWorkflowTemplateAsync(formattedName, parameters).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateWorkflowTemplateRequest actualRequest = + (InstantiateWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedName, actualRequest.getName()); + Assert.assertEquals(parameters, actualRequest.getParametersMap()); + Assert.assertTrue( + 
channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void instantiateWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + Map parameters = new HashMap<>(); + + client.instantiateWorkflowTemplateAsync(formattedName, parameters).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + @SuppressWarnings("all") + public void instantiateInlineWorkflowTemplateTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateInlineWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + Empty actualResponse = + client.instantiateInlineWorkflowTemplateAsync(formattedParent, template).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateInlineWorkflowTemplateRequest actualRequest = + (InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedParent, 
actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void instantiateInlineWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + client.instantiateInlineWorkflowTemplateAsync(formattedParent, template).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + @SuppressWarnings("all") + public void updateWorkflowTemplateTest() { + String id = "id3355"; + WorkflowTemplateName name = + WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + int version = 351608024; + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId(id) + .setName(name.toString()) + .setVersion(version) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + WorkflowTemplate actualResponse = client.updateWorkflowTemplate(template); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + UpdateWorkflowTemplateRequest actualRequest = + (UpdateWorkflowTemplateRequest) actualRequests.get(0); + + 
Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void updateWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + client.updateWorkflowTemplate(template); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + + @Test + @SuppressWarnings("all") + public void listWorkflowTemplatesTest() { + String nextPageToken = ""; + WorkflowTemplate templatesElement = WorkflowTemplate.newBuilder().build(); + List templates = Arrays.asList(templatesElement); + ListWorkflowTemplatesResponse expectedResponse = + ListWorkflowTemplatesResponse.newBuilder() + .setNextPageToken(nextPageToken) + .addAllTemplates(templates) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + + ListWorkflowTemplatesPagedResponse pagedListResponse = + client.listWorkflowTemplates(formattedParent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0)); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListWorkflowTemplatesRequest actualRequest = + (ListWorkflowTemplatesRequest) actualRequests.get(0); + + Assert.assertEquals(formattedParent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + 
GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void listWorkflowTemplatesExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + + client.listWorkflowTemplates(formattedParent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + + @Test + @SuppressWarnings("all") + public void deleteWorkflowTemplateTest() { + Empty expectedResponse = Empty.newBuilder().build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + client.deleteWorkflowTemplate(formattedName); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteWorkflowTemplateRequest actualRequest = + (DeleteWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedName, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void deleteWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + client.deleteWorkflowTemplate(formattedName); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } +} diff --git 
a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java index cfa9e8c48293..881ee90c359f 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java @@ -29,6 +29,7 @@ import com.google.longrunning.Operation; import com.google.protobuf.Any; import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; import com.google.protobuf.GeneratedMessageV3; import io.grpc.Status; import io.grpc.StatusRuntimeException; @@ -147,6 +148,73 @@ public void createClusterExceptionTest() throws Exception { } } + @Test + @SuppressWarnings("all") + public void updateClusterTest() throws Exception { + String projectId2 = "projectId2939242356"; + String clusterName2 = "clusterName2875867491"; + String clusterUuid = "clusterUuid-1017854240"; + Cluster expectedResponse = + Cluster.newBuilder() + .setProjectId(projectId2) + .setClusterName(clusterName2) + .setClusterUuid(clusterUuid) + .build(); + Operation resultOperation = + Operation.newBuilder() + .setName("updateClusterTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockClusterController.addResponse(resultOperation); + + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String clusterName = "clusterName-1018081872"; + Cluster cluster = Cluster.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + Cluster actualResponse = + client.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockClusterController.getRequests(); + 
Assert.assertEquals(1, actualRequests.size()); + UpdateClusterRequest actualRequest = (UpdateClusterRequest) actualRequests.get(0); + + Assert.assertEquals(projectId, actualRequest.getProjectId()); + Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(clusterName, actualRequest.getClusterName()); + Assert.assertEquals(cluster, actualRequest.getCluster()); + Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void updateClusterExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockClusterController.addException(exception); + + try { + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String clusterName = "clusterName-1018081872"; + Cluster cluster = Cluster.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + client.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + @Test @SuppressWarnings("all") public void deleteClusterTest() throws Exception { @@ -302,6 +370,60 @@ public void listClustersExceptionTest() throws Exception { } } + @Test + @SuppressWarnings("all") + public void listClustersTest2() { + String nextPageToken = ""; + Cluster clustersElement = Cluster.newBuilder().build(); + List clusters = Arrays.asList(clustersElement); + ListClustersResponse expectedResponse = + ListClustersResponse.newBuilder() + 
.setNextPageToken(nextPageToken) + .addAllClusters(clusters) + .build(); + mockClusterController.addResponse(expectedResponse); + + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region, filter); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); + + List actualRequests = mockClusterController.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListClustersRequest actualRequest = (ListClustersRequest) actualRequests.get(0); + + Assert.assertEquals(projectId, actualRequest.getProjectId()); + Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void listClustersExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockClusterController.addException(exception); + + try { + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + client.listClusters(projectId, region, filter); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + @Test @SuppressWarnings("all") public void diagnoseClusterTest() throws Exception { diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java index 
89b3939d2dd2..40d534d3dbb7 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java @@ -86,12 +86,16 @@ public void tearDown() throws Exception { @Test @SuppressWarnings("all") public void submitJobTest() { + String submittedBy = "submittedBy-2047729125"; String driverOutputResourceUri = "driverOutputResourceUri-542229086"; String driverControlFilesUri = "driverControlFilesUri207057643"; + String jobUuid = "jobUuid-1615012099"; Job expectedResponse = Job.newBuilder() + .setSubmittedBy(submittedBy) .setDriverOutputResourceUri(driverOutputResourceUri) .setDriverControlFilesUri(driverControlFilesUri) + .setJobUuid(jobUuid) .build(); mockJobController.addResponse(expectedResponse); @@ -136,12 +140,16 @@ public void submitJobExceptionTest() throws Exception { @Test @SuppressWarnings("all") public void getJobTest() { + String submittedBy = "submittedBy-2047729125"; String driverOutputResourceUri = "driverOutputResourceUri-542229086"; String driverControlFilesUri = "driverControlFilesUri207057643"; + String jobUuid = "jobUuid-1615012099"; Job expectedResponse = Job.newBuilder() + .setSubmittedBy(submittedBy) .setDriverOutputResourceUri(driverOutputResourceUri) .setDriverControlFilesUri(driverControlFilesUri) + .setJobUuid(jobUuid) .build(); mockJobController.addResponse(expectedResponse); @@ -231,15 +239,70 @@ public void listJobsExceptionTest() throws Exception { } } + @Test + @SuppressWarnings("all") + public void listJobsTest2() { + String nextPageToken = ""; + Job jobsElement = Job.newBuilder().build(); + List jobs = Arrays.asList(jobsElement); + ListJobsResponse expectedResponse = + ListJobsResponse.newBuilder().setNextPageToken(nextPageToken).addAllJobs(jobs).build(); + mockJobController.addResponse(expectedResponse); + + String projectId = "projectId-1969970175"; + 
String region = "region-934795532"; + String filter = "filter-1274492040"; + + ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region, filter); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); + + List actualRequests = mockJobController.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListJobsRequest actualRequest = (ListJobsRequest) actualRequests.get(0); + + Assert.assertEquals(projectId, actualRequest.getProjectId()); + Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void listJobsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockJobController.addException(exception); + + try { + String projectId = "projectId-1969970175"; + String region = "region-934795532"; + String filter = "filter-1274492040"; + + client.listJobs(projectId, region, filter); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception + } + } + @Test @SuppressWarnings("all") public void cancelJobTest() { + String submittedBy = "submittedBy-2047729125"; String driverOutputResourceUri = "driverOutputResourceUri-542229086"; String driverControlFilesUri = "driverControlFilesUri207057643"; + String jobUuid = "jobUuid-1615012099"; Job expectedResponse = Job.newBuilder() + .setSubmittedBy(submittedBy) .setDriverOutputResourceUri(driverOutputResourceUri) .setDriverControlFilesUri(driverControlFilesUri) + .setJobUuid(jobUuid) .build(); mockJobController.addResponse(expectedResponse); diff --git 
a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java index 8b3b52e17d6b..672de8f28ad7 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java @@ -103,6 +103,22 @@ public void instantiateWorkflowTemplate( } } + @Override + public void instantiateInlineWorkflowTemplate( + InstantiateInlineWorkflowTemplateRequest request, + StreamObserver responseObserver) { + Object response = responses.remove(); + if (response instanceof Operation) { + requests.add(request); + responseObserver.onNext((Operation) response); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError((Exception) response); + } else { + responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); + } + } + @Override public void updateWorkflowTemplate( UpdateWorkflowTemplateRequest request, StreamObserver responseObserver) { diff --git a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java index f66fd0b8ccd0..280a591c46ec 100644 --- a/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java +++ b/google-cloud-clients/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java @@ -34,7 +34,9 @@ import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; 
+import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; import org.junit.After; import org.junit.AfterClass; @@ -102,10 +104,10 @@ public void createWorkflowTemplateTest() { .build(); mockWorkflowTemplateService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template); + WorkflowTemplate actualResponse = client.createWorkflowTemplate(formattedParent, template); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockWorkflowTemplateService.getRequests(); @@ -113,7 +115,7 @@ public void createWorkflowTemplateTest() { CreateWorkflowTemplateRequest actualRequest = (CreateWorkflowTemplateRequest) actualRequests.get(0); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(formattedParent, actualRequest.getParent()); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -128,10 +130,10 @@ public void createWorkflowTemplateExceptionTest() throws Exception { mockWorkflowTemplateService.addException(exception); try { - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - client.createWorkflowTemplate(parent, template); + client.createWorkflowTemplate(formattedParent, template); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception @@ -153,17 +155,17 @@ public void getWorkflowTemplateTest() { .build(); mockWorkflowTemplateService.addResponse(expectedResponse); - WorkflowTemplateName name = - WorkflowTemplateName.of("[PROJECT]", "[REGION]", 
"[WORKFLOW_TEMPLATE]"); + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - WorkflowTemplate actualResponse = client.getWorkflowTemplate(name); + WorkflowTemplate actualResponse = client.getWorkflowTemplate(formattedName); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetWorkflowTemplateRequest actualRequest = (GetWorkflowTemplateRequest) actualRequests.get(0); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(formattedName, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -177,10 +179,10 @@ public void getWorkflowTemplateExceptionTest() throws Exception { mockWorkflowTemplateService.addException(exception); try { - WorkflowTemplateName name = - WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - client.getWorkflowTemplate(name); + client.getWorkflowTemplate(formattedName); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception @@ -199,11 +201,10 @@ public void instantiateWorkflowTemplateTest() throws Exception { .build(); mockWorkflowTemplateService.addResponse(resultOperation); - WorkflowTemplateName name = - WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - String instanceId = "instanceId-2101995259"; + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - Empty actualResponse = client.instantiateWorkflowTemplateAsync(name, instanceId).get(); + Empty actualResponse = client.instantiateWorkflowTemplateAsync(formattedName).get(); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = 
mockWorkflowTemplateService.getRequests(); @@ -211,8 +212,7 @@ public void instantiateWorkflowTemplateTest() throws Exception { InstantiateWorkflowTemplateRequest actualRequest = (InstantiateWorkflowTemplateRequest) actualRequests.get(0); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); - Assert.assertEquals(instanceId, actualRequest.getInstanceId()); + Assert.assertEquals(formattedName, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -226,11 +226,108 @@ public void instantiateWorkflowTemplateExceptionTest() throws Exception { mockWorkflowTemplateService.addException(exception); try { - WorkflowTemplateName name = - WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - String instanceId = "instanceId-2101995259"; + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + client.instantiateWorkflowTemplateAsync(formattedName).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + @SuppressWarnings("all") + public void instantiateWorkflowTemplateTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateWorkflowTemplateTest2") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + Map parameters = new HashMap<>(); + + Empty actualResponse = client.instantiateWorkflowTemplateAsync(parameters).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = 
mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateWorkflowTemplateRequest actualRequest = + (InstantiateWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(parameters, actualRequest.getParametersMap()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void instantiateWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + Map parameters = new HashMap<>(); - client.instantiateWorkflowTemplateAsync(name, instanceId).get(); + client.instantiateWorkflowTemplateAsync(parameters).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + @SuppressWarnings("all") + public void instantiateInlineWorkflowTemplateTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateInlineWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + Empty actualResponse = + client.instantiateInlineWorkflowTemplateAsync(formattedParent, template).get(); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = 
mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateInlineWorkflowTemplateRequest actualRequest = + (InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0); + + Assert.assertEquals(formattedParent, actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + @SuppressWarnings("all") + public void instantiateInlineWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + client.instantiateInlineWorkflowTemplateAsync(formattedParent, template).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); @@ -300,9 +397,10 @@ public void listWorkflowTemplatesTest() { .build(); mockWorkflowTemplateService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); - ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent); + ListWorkflowTemplatesPagedResponse pagedListResponse = + client.listWorkflowTemplates(formattedParent); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); @@ -313,7 +411,7 @@ public void listWorkflowTemplatesTest() { ListWorkflowTemplatesRequest actualRequest = (ListWorkflowTemplatesRequest) actualRequests.get(0); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + 
Assert.assertEquals(formattedParent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -327,9 +425,9 @@ public void listWorkflowTemplatesExceptionTest() throws Exception { mockWorkflowTemplateService.addException(exception); try { - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + String formattedParent = RegionName.format("[PROJECT]", "[REGION]"); - client.listWorkflowTemplates(parent); + client.listWorkflowTemplates(formattedParent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception @@ -342,17 +440,17 @@ public void deleteWorkflowTemplateTest() { Empty expectedResponse = Empty.newBuilder().build(); mockWorkflowTemplateService.addResponse(expectedResponse); - WorkflowTemplateName name = - WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - client.deleteWorkflowTemplate(name); + client.deleteWorkflowTemplate(formattedName); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteWorkflowTemplateRequest actualRequest = (DeleteWorkflowTemplateRequest) actualRequests.get(0); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(formattedName, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -366,10 +464,10 @@ public void deleteWorkflowTemplateExceptionTest() throws Exception { mockWorkflowTemplateService.addException(exception); try { - WorkflowTemplateName name = - WorkflowTemplateName.of("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + String formattedName = + WorkflowTemplateName.format("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - client.deleteWorkflowTemplate(name); + 
client.deleteWorkflowTemplate(formattedName); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception