diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index f1073ed..953343d 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -842,7 +842,7 @@ async def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. @@ -1778,6 +1778,12 @@ async def run_build_trigger( ) -> operation_async.AsyncOperation: r"""Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1825,6 +1831,9 @@ async def sample_run_build_trigger(): should not be set. source (:class:`google.cloud.devtools.cloudbuild_v1.types.RepoSource`): Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + This corresponds to the ``source`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 507f260..ad35d7b 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -759,13 +759,19 @@ def sample_create_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -903,16 +909,19 @@ def sample_get_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("id", request.id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. 
response = rpc( @@ -1022,13 +1031,19 @@ def sample_list_builds(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_builds] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1163,16 +1178,19 @@ def sample_cancel_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.cancel_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("id", request.id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1216,7 +1234,7 @@ def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. 
@@ -1337,16 +1355,19 @@ def sample_retry_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.retry_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("id", request.id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1494,11 +1515,19 @@ def sample_approve_build(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.approve_build] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/builds/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1618,13 +1647,19 @@ def sample_create_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1735,16 +1770,19 @@ def sample_get_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1846,13 +1884,19 @@ def sample_list_build_triggers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("project_id", request.project_id),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1960,16 +2004,19 @@ def sample_delete_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. rpc( @@ -2087,16 +2134,19 @@ def sample_update_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.trigger.resource_name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2122,6 +2172,12 @@ def run_build_trigger( ) -> operation.Operation: r"""Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -2169,6 +2225,9 @@ def sample_run_build_trigger(): should not be set. source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): Source to build against this trigger. + Branch and tag names cannot consist of + regular expressions. + This corresponds to the ``source`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2239,16 +2298,19 @@ def sample_run_build_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - ( - ("project_id", request.project_id), - ("trigger_id", request.trigger_id), - ) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/triggers/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2485,11 +2547,19 @@ def sample_create_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2608,11 +2678,19 @@ def sample_get_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2724,11 +2802,19 @@ def sample_delete_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2866,13 +2952,19 @@ def sample_update_worker_pool(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("worker_pool.name", request.worker_pool.name),) - ), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/workerPools/[^/]+$" ) + regex_match = routing_param_regex.match(request.worker_pool.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -2981,11 +3073,19 @@ def sample_list_worker_pools(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. 
response = rpc( diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index fd412cd..4fb6ebe 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -393,7 +393,7 @@ def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. @@ -601,6 +601,12 @@ def run_build_trigger( Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + Returns: Callable[[~.RunBuildTriggerRequest], ~.Operation]: diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 9ad5623..dadb593 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -402,7 +402,7 @@ def retry_build( For builds that specify ``StorageSource``: - - If the original build pulled source from Google Cloud Storage + - If the original build pulled source from Cloud Storage without specifying the generation of the object, the new build will use the current object, which may be different from the original build source. 
@@ -621,6 +621,12 @@ def run_build_trigger( Runs a ``BuildTrigger`` at a particular source revision. + To run a regional or global trigger, use the POST request that + includes the location endpoint in the path (ex. + v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). + The POST request that does not include the location endpoint in + the path can only be used when running global triggers. + Returns: Callable[[~.RunBuildTriggerRequest], Awaitable[~.Operation]]: diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index 93d0199..d073dba 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -134,6 +134,8 @@ class RunBuildTriggerRequest(proto.Message): Required. ID of the trigger. source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): Source to build against this trigger. + Branch and tag names cannot consist of regular + expressions. """ name: str = proto.Field( @@ -156,23 +158,21 @@ class RunBuildTriggerRequest(proto.Message): class StorageSource(proto.Message): - r"""Location of the source in an archive file in Google Cloud - Storage. + r"""Location of the source in an archive file in Cloud Storage. Attributes: bucket (str): - Google Cloud Storage bucket containing the source (see - `Bucket Name + Cloud Storage bucket containing the source (see `Bucket Name Requirements `__). object_ (str): - Google Cloud Storage object containing the source. + Cloud Storage object containing the source. - This object must be a gzipped archive file (``.tar.gz``) - containing source to build. + This object must be a zipped (``.zip``) or gzipped archive + file (``.tar.gz``) containing source to build. generation (int): - Google Cloud Storage generation for the - object. If the generation is omitted, the latest - generation will be used. + Cloud Storage generation for the object. 
If + the generation is omitted, the latest generation + will be used. """ bucket: str = proto.Field( @@ -321,23 +321,23 @@ class RepoSource(proto.Message): class StorageSourceManifest(proto.Message): - r"""Location of the source manifest in Google Cloud Storage. This - feature is in Preview; see description + r"""Location of the source manifest in Cloud Storage. This feature is in + Preview; see description `here `__. Attributes: bucket (str): - Google Cloud Storage bucket containing the source manifest - (see `Bucket Name + Cloud Storage bucket containing the source manifest (see + `Bucket Name Requirements `__). object_ (str): - Google Cloud Storage object containing the - source manifest. + Cloud Storage object containing the source + manifest. This object must be a JSON file. generation (int): - Google Cloud Storage generation for the - object. If the generation is omitted, the latest - generation will be used. + Cloud Storage generation for the object. If + the generation is omitted, the latest generation + will be used. """ bucket: str = proto.Field( @@ -367,7 +367,7 @@ class Source(proto.Message): Attributes: storage_source (google.cloud.devtools.cloudbuild_v1.types.StorageSource): If provided, get the source from this - location in Google Cloud Storage. + location in Cloud Storage. This field is a member of `oneof`_ ``source``. repo_source (google.cloud.devtools.cloudbuild_v1.types.RepoSource): @@ -381,8 +381,8 @@ class Source(proto.Message): This field is a member of `oneof`_ ``source``. storage_source_manifest (google.cloud.devtools.cloudbuild_v1.types.StorageSourceManifest): - If provided, get the source from this manifest in Google - Cloud Storage. This feature is in Preview; see description + If provided, get the source from this manifest in Cloud + Storage. This feature is in Preview; see description `here `__. This field is a member of `oneof`_ ``source``. 
@@ -842,8 +842,8 @@ class ArtifactResult(proto.Message): Attributes: location (str): - The path of an artifact in a Google Cloud Storage bucket, - with the generation number. For example, + The path of an artifact in a Cloud Storage bucket, with the + generation number. For example, ``gs://mybucket/path/to/output.jar#generation``. file_hash (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.FileHashes]): The file hash of the artifact. @@ -947,8 +947,8 @@ class Build(proto.Message): be uploaded upon successful completion of all build steps. logs_bucket (str): - Google Cloud Storage bucket where logs should be written - (see `Bucket Name + Cloud Storage bucket where logs should be written (see + `Bucket Name Requirements `__). Logs file names will be of the format ``${logs_bucket}/log-${build_id}.txt``. @@ -2368,7 +2368,6 @@ class RepositoryType(proto.Enum): class GitHubEventsConfig(proto.Message): r"""GitHubEventsConfig describes the configuration of a trigger that creates a build whenever a GitHub event is received. - This message is experimental. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -2859,7 +2858,7 @@ class BuildOptions(proto.Message): configuration file. log_streaming_option (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.LogStreamingOption): Option to define build log streaming behavior - to Google Cloud Storage. + to Cloud Storage. worker_pool (str): This field deprecated; please use ``pool.name`` instead. pool (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.PoolOption): @@ -2904,8 +2903,18 @@ class BuildOptions(proto.Message): """ class VerifyOption(proto.Enum): - r"""Specifies the manner in which the build should be verified, - if at all. + r"""Specifies the manner in which the build should be verified, if at + all. 
+ + If a verified build is requested, and any part of the process to + generate and upload provenance fails, the build will also fail. + + If the build does not request verification then that process may + occur, but is not guaranteed to. If it does occur and fails, the + build will not fail. + + For more information, see `Viewing Build + Provenance `__. Values: NOT_VERIFIED (0): @@ -2959,20 +2968,20 @@ class SubstitutionOption(proto.Enum): ALLOW_LOOSE = 1 class LogStreamingOption(proto.Enum): - r"""Specifies the behavior when writing build logs to Google - Cloud Storage. + r"""Specifies the behavior when writing build logs to Cloud + Storage. Values: STREAM_DEFAULT (0): Service may automatically determine build log streaming behavior. STREAM_ON (1): - Build logs should be streamed to Google Cloud + Build logs should be streamed to Cloud Storage. STREAM_OFF (2): - Build logs should not be streamed to Google - Cloud Storage; they will be written when the - build is completed. + Build logs should not be streamed to Cloud + Storage; they will be written when the build is + completed. """ STREAM_DEFAULT = 0 STREAM_ON = 1 @@ -3238,12 +3247,15 @@ class State(proto.Enum): draining workers. DELETED (4): ``WorkerPool`` is deleted. + UPDATING (5): + ``WorkerPool`` is being updated; new builds cannot be run. """ STATE_UNSPECIFIED = 0 CREATING = 1 RUNNING = 2 DELETING = 3 DELETED = 4 + UPDATING = 5 name: str = proto.Field( proto.STRING, @@ -3468,9 +3480,9 @@ class DeleteWorkerPoolRequest(proto.Message): Required. The name of the ``WorkerPool`` to delete. Format: ``projects/{project}/locations/{location}/workerPools/{workerPool}``. etag (str): - Optional. If this is provided, it must match - the server's etag on the workerpool for the - request to be processed. + Optional. If provided, it must match the + server's etag on the workerpool for the request + to be processed. 
allow_missing (bool): If set to true, and the ``WorkerPool`` is not found, the request will succeed but no action will be taken on the diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index bf3f73e..f5fb0f8 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -777,16 +777,16 @@ async def test_create_build_async_from_dict(): await test_create_build_async(request_type=dict) -def test_create_build_field_headers(): +def test_create_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildRequest() - - request.project_id = "project_id_value" + request = cloudbuild.CreateBuildRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_build), "__call__") as call: @@ -798,44 +798,9 @@ def test_create_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_create_build_flattened(): @@ -1059,17 +1024,16 @@ async def test_get_build_async_from_dict(): await test_get_build_async(request_type=dict) -def test_get_build_field_headers(): +def test_get_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" + request = cloudbuild.GetBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_build), "__call__") as call: @@ -1081,43 +1045,9 @@ def test_get_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = cloudbuild.GetBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - await client.get_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_build_flattened(): @@ -1299,16 +1229,16 @@ async def test_list_builds_async_from_dict(): await test_list_builds_async(request_type=dict) -def test_list_builds_field_headers(): +def test_list_builds_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildsRequest() - - request.project_id = "project_id_value" + request = cloudbuild.ListBuildsRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_builds), "__call__") as call: @@ -1320,44 +1250,9 @@ def test_list_builds_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_builds_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildsRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_builds), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildsResponse() - ) - await client.list_builds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_list_builds_flattened(): @@ -1490,9 +1385,6 @@ def test_list_builds_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), - ) pager = client.list_builds(request={}) assert pager._metadata == metadata @@ -1771,17 +1663,16 @@ async def test_cancel_build_async_from_dict(): await test_cancel_build_async(request_type=dict) -def test_cancel_build_field_headers(): +def test_cancel_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.CancelBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" + request = cloudbuild.CancelBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: @@ -1793,43 +1684,9 @@ def test_cancel_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CancelBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) - await client.cancel_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_cancel_build_flattened(): @@ -2005,17 +1862,16 @@ async def test_retry_build_async_from_dict(): await test_retry_build_async(request_type=dict) -def test_retry_build_field_headers(): +def test_retry_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.RetryBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" + request = cloudbuild.RetryBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.retry_build), "__call__") as call: @@ -2027,45 +1883,9 @@ def test_retry_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_retry_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.RetryBuildRequest() - - request.project_id = "project_id_value" - request.id = "id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.retry_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.retry_build(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&id=id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_retry_build_flattened(): @@ -2243,16 +2063,16 @@ async def test_approve_build_async_from_dict(): await test_approve_build_async(request_type=dict) -def test_approve_build_field_headers(): +def test_approve_build_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.ApproveBuildRequest() - - request.name = "name_value" + request = cloudbuild.ApproveBuildRequest( + **{"name": "projects/sample1/locations/sample2/builds/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_build), "__call__") as call: @@ -2264,44 +2084,9 @@ def test_approve_build_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_approve_build_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ApproveBuildRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.approve_build), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.approve_build(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_approve_build_flattened(): @@ -2536,16 +2321,16 @@ async def test_create_build_trigger_async_from_dict(): await test_create_build_trigger_async(request_type=dict) -def test_create_build_trigger_field_headers(): +def test_create_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.CreateBuildTriggerRequest() - - request.project_id = "project_id_value" + request = cloudbuild.CreateBuildTriggerRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2559,46 +2344,9 @@ def test_create_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.CreateBuildTriggerRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) - await client.create_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_create_build_trigger_flattened(): @@ -2829,17 +2577,16 @@ async def test_get_build_trigger_async_from_dict(): await test_get_build_trigger_async(request_type=dict) -def test_get_build_trigger_field_headers(): +def test_get_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.GetBuildTriggerRequest( + **{"name": "projects/sample1/locations/sample2/triggers/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2853,47 +2600,9 @@ def test_get_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) - await client.get_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_build_trigger_flattened(): @@ -3087,16 +2796,16 @@ async def test_list_build_triggers_async_from_dict(): await test_list_build_triggers_async(request_type=dict) -def test_list_build_triggers_field_headers(): +def test_list_build_triggers_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.ListBuildTriggersRequest() - - request.project_id = "project_id_value" + request = cloudbuild.ListBuildTriggersRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3110,46 +2819,9 @@ def test_list_build_triggers_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_build_triggers_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListBuildTriggersRequest() - - request.project_id = "project_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_build_triggers), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildTriggersResponse() - ) - await client.list_build_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_list_build_triggers_flattened(): @@ -3278,9 +2950,6 @@ def test_list_build_triggers_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("project_id", ""),)), - ) pager = client.list_build_triggers(request={}) assert pager._metadata == metadata @@ -3523,17 +3192,16 @@ async def test_delete_build_trigger_async_from_dict(): await test_delete_build_trigger_async(request_type=dict) -def test_delete_build_trigger_field_headers(): +def test_delete_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.DeleteBuildTriggerRequest( + **{"name": "projects/sample1/locations/sample2/triggers/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3543,49 +3211,13 @@ def test_delete_build_trigger_field_headers(): client.delete_build_trigger(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = cloudbuild.DeleteBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_delete_build_trigger_flattened(): @@ -3814,17 +3446,20 @@ async def test_update_build_trigger_async_from_dict(): await test_update_build_trigger_async(request_type=dict) -def test_update_build_trigger_field_headers(): +def test_update_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.UpdateBuildTriggerRequest( + **{ + "trigger": { + "resource_name": "projects/sample1/locations/sample2/triggers/sample3" + } + } + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3838,47 +3473,9 @@ def test_update_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) - await client.update_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_update_build_trigger_flattened(): @@ -4076,17 +3673,16 @@ async def test_run_build_trigger_async_from_dict(): await test_run_build_trigger_async(request_type=dict) -def test_run_build_trigger_field_headers(): +def test_run_build_trigger_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = cloudbuild.RunBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" + request = cloudbuild.RunBuildTriggerRequest( + **{"name": "projects/sample1/locations/sample2/triggers/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4100,47 +3696,9 @@ def test_run_build_trigger_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_build_trigger_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.RunBuildTriggerRequest() - - request.project_id = "project_id_value" - request.trigger_id = "trigger_id_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.run_build_trigger), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.run_build_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "project_id=project_id_value&trigger_id=trigger_id_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_run_build_trigger_flattened(): @@ -4495,16 +4053,16 @@ async def test_create_worker_pool_async_from_dict(): await test_create_worker_pool_async(request_type=dict) -def test_create_worker_pool_field_headers(): +def test_create_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.CreateWorkerPoolRequest() - - request.parent = "parent_value" + request = cloudbuild.CreateWorkerPoolRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4518,46 +4076,9 @@ def test_create_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.CreateWorkerPoolRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_worker_pool), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_create_worker_pool_flattened(): @@ -4771,16 +4292,16 @@ async def test_get_worker_pool_async_from_dict(): await test_get_worker_pool_async(request_type=dict) -def test_get_worker_pool_field_headers(): +def test_get_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.GetWorkerPoolRequest() - - request.name = "name_value" + request = cloudbuild.GetWorkerPoolRequest( + **{"name": "projects/sample1/locations/sample2/workerPools/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: @@ -4792,44 +4313,9 @@ def test_get_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.GetWorkerPoolRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.WorkerPool() - ) - await client.get_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_worker_pool_flattened(): @@ -5003,16 +4489,16 @@ async def test_delete_worker_pool_async_from_dict(): await test_delete_worker_pool_async(request_type=dict) -def test_delete_worker_pool_field_headers(): +def test_delete_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteWorkerPoolRequest() - - request.name = "name_value" + request = cloudbuild.DeleteWorkerPoolRequest( + **{"name": "projects/sample1/locations/sample2/workerPools/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5026,46 +4512,9 @@ def test_delete_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.DeleteWorkerPoolRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_worker_pool), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_delete_worker_pool_flattened(): @@ -5243,16 +4692,20 @@ async def test_update_worker_pool_async_from_dict(): await test_update_worker_pool_async(request_type=dict) -def test_update_worker_pool_field_headers(): +def test_update_worker_pool_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.UpdateWorkerPoolRequest() - - request.worker_pool.name = "name_value" + request = cloudbuild.UpdateWorkerPoolRequest( + **{ + "worker_pool": { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + } + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5266,46 +4719,9 @@ def test_update_worker_pool_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "worker_pool.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_worker_pool_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = cloudbuild.UpdateWorkerPoolRequest() - - request.worker_pool.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_worker_pool), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_worker_pool(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "worker_pool.name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_update_worker_pool_flattened(): @@ -5499,16 +4915,16 @@ async def test_list_worker_pools_async_from_dict(): await test_list_worker_pools_async(request_type=dict) -def test_list_worker_pools_field_headers(): +def test_list_worker_pools_routing_parameters(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = cloudbuild.ListWorkerPoolsRequest() - - request.parent = "parent_value" + request = cloudbuild.ListWorkerPoolsRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5522,46 +4938,9 @@ def test_list_worker_pools_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_worker_pools_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = cloudbuild.ListWorkerPoolsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_worker_pools), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListWorkerPoolsResponse() - ) - await client.list_worker_pools(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_list_worker_pools_flattened(): @@ -5690,9 +5069,6 @@ def test_list_worker_pools_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) pager = client.list_worker_pools(request={}) assert pager._metadata == metadata